Mirror of https://github.com/home-assistant/core.git (synced 2025-11-26 11:08:01 +00:00)

Compare commits: tibber_dat ... blueprint- (1 commit)

| Author | SHA1 | Date |
|---|---|---|
| | 84e4a0f22e | |
.github/workflows/builder.yml (vendored, 18 changes)
@@ -27,7 +27,7 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0

@@ -88,9 +88,13 @@ jobs:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
exclude:
- arch: armv7
- arch: armhf
- arch: i386
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'

@@ -223,7 +227,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Set build additional args
run: |

@@ -261,7 +265,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master

@@ -305,7 +309,7 @@ jobs:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0

@@ -414,7 +418,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0

@@ -459,7 +463,7 @@ jobs:
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
.github/workflows/ci.yaml (vendored, 2 changes)
@@ -99,7 +99,7 @@ jobs:
steps:
- &checkout
name: Check out code from GitHub
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: |
.github/workflows/codeql.yml (vendored, 6 changes)
@@ -21,14 +21,14 @@ jobs:

steps:
- name: Check out code from GitHub
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Initialize CodeQL
uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
uses: github/codeql-action/analyze@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
with:
category: "/language:python"

@@ -231,7 +231,7 @@ jobs:
- name: Detect duplicates using AI
id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
with:
model: openai/gpt-4o
system-prompt: |

@@ -57,7 +57,7 @@ jobs:
- name: Detect language using AI
id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
with:
model: openai/gpt-4o-mini
system-prompt: |
.github/workflows/translations.yml (vendored, 2 changes)
@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
.github/workflows/wheels.yml (vendored, 40 changes)
@@ -33,7 +33,7 @@ jobs:
steps:
- &checkout
name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python

@@ -77,8 +77,20 @@ jobs:

# Use C-Extension for SQLAlchemy
echo "REQUIRE_SQLALCHEMY_CEXT=1"

# Add additional pip wheel build constraints
echo "PIP_CONSTRAINT=build_constraints.txt"
) > .env_file

- name: Write pip wheel build constraints
run: |
(
# ninja 1.11.1.2 + 1.11.1.3 seem to be broken on at least armhf
# this caused the numpy builds to fail
# https://github.com/scikit-build/ninja-python-distributions/issues/274
echo "ninja==1.11.1.1"
) > build_constraints.txt

- name: Upload env_file
uses: &actions-upload-artifact actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:

@@ -87,6 +99,13 @@ jobs:
include-hidden-files: true
overwrite: true

- name: Upload build_constraints
uses: *actions-upload-artifact
with:
name: build_constraints
path: ./build_constraints.txt
overwrite: true

- name: Upload requirements_diff
uses: *actions-upload-artifact
with:

@@ -119,6 +138,13 @@ jobs:
- os: ubuntu-latest
- arch: aarch64
os: ubuntu-24.04-arm
exclude:
- abi: cp314
arch: armv7
- abi: cp314
arch: armhf
- abi: cp314
arch: i386
steps:
- *checkout

@@ -128,6 +154,12 @@ jobs:
with:
name: env_file

- &download-build-constraints
name: Download build_constraints
uses: *actions-download-artifact
with:
name: build_constraints

- &download-requirements-diff
name: Download requirements_diff
uses: *actions-download-artifact

@@ -167,7 +199,7 @@ jobs:
- *checkout

- *download-env-file

- *download-build-constraints
- *download-requirements-diff

- name: Download requirements_all_wheels

@@ -177,6 +209,10 @@ jobs:

- name: Adjust build env
run: |
if [ "${{ matrix.arch }}" = "i386" ]; then
echo "NPY_DISABLE_SVML=1" >> .env_file
fi

# Do not pin numpy in wheels building
sed -i "/numpy/d" homeassistant/package_constraints.txt
# Don't build wheels for uv as uv requires a greater version of rust as currently available on alpine
@@ -87,7 +87,7 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(conditions|quality_scale|services|triggers)\.yaml|homeassistant/brands/.*\.json|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(quality_scale)\.yaml|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
- id: hassfest-metadata
name: hassfest-metadata
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker
@@ -579,7 +579,6 @@ homeassistant.components.wiz.*
homeassistant.components.wled.*
homeassistant.components.workday.*
homeassistant.components.worldclock.*
homeassistant.components.xbox.*
homeassistant.components.xiaomi_ble.*
homeassistant.components.yale_smart_alarm.*
homeassistant.components.yalexs_ble.*
CODEOWNERS (generated, 10 changes)
@@ -69,8 +69,6 @@ build.json @home-assistant/supervisor
/tests/components/airly/ @bieniu
/homeassistant/components/airnow/ @asymworks
/tests/components/airnow/ @asymworks
/homeassistant/components/airobot/ @mettolen
/tests/components/airobot/ @mettolen
/homeassistant/components/airos/ @CoMPaTech
/tests/components/airos/ @CoMPaTech
/homeassistant/components/airq/ @Sibgatulin @dl2080

@@ -391,8 +389,6 @@ build.json @home-assistant/supervisor
/tests/components/dsmr/ @Robbie1221
/homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/homeassistant/components/duckdns/ @tr4nt0r
/tests/components/duckdns/ @tr4nt0r
/homeassistant/components/duke_energy/ @hunterjm
/tests/components/duke_energy/ @hunterjm
/homeassistant/components/duotecno/ @cereal2nd

@@ -631,8 +627,6 @@ build.json @home-assistant/supervisor
/tests/components/guardian/ @bachya
/homeassistant/components/habitica/ @tr4nt0r
/tests/components/habitica/ @tr4nt0r
/homeassistant/components/hanna/ @bestycame
/tests/components/hanna/ @bestycame
/homeassistant/components/hardkernel/ @home-assistant/core
/tests/components/hardkernel/ @home-assistant/core
/homeassistant/components/hardware/ @home-assistant/core

@@ -852,8 +846,6 @@ build.json @home-assistant/supervisor
/tests/components/kraken/ @eifinger
/homeassistant/components/kulersky/ @emlove
/tests/components/kulersky/ @emlove
/homeassistant/components/labs/ @home-assistant/core
/tests/components/labs/ @home-assistant/core
/homeassistant/components/lacrosse_view/ @IceBotYT
/tests/components/lacrosse_view/ @IceBotYT
/homeassistant/components/lamarzocco/ @zweckj

@@ -1744,8 +1736,6 @@ build.json @home-assistant/supervisor
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
/homeassistant/components/vicare/ @CFenner
/tests/components/vicare/ @CFenner
/homeassistant/components/victron_ble/ @rajlaud
/tests/components/victron_ble/ @rajlaud
/homeassistant/components/victron_remote_monitoring/ @AndyTempel
/tests/components/victron_remote_monitoring/ @AndyTempel
/homeassistant/components/vilfo/ @ManneW
Dockerfile (generated, 4 changes)
@@ -21,9 +21,11 @@ ARG BUILD_ARCH
RUN \
case "${BUILD_ARCH}" in \
"aarch64") go2rtc_suffix='arm64' ;; \
"armhf") go2rtc_suffix='armv6' ;; \
"armv7") go2rtc_suffix='arm' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.12/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.11/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
&& go2rtc --version
@@ -1,7 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.11.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.11.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.11.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.11.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.11.0
cosign:
base_identity: https://github.com/home-assistant/docker/.*
identity: https://github.com/home-assistant/core/.*
@@ -176,8 +176,6 @@ FRONTEND_INTEGRATIONS = {
STAGE_0_INTEGRATIONS = (
# Load logging and http deps as soon as possible
("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS, None),
# Setup labs for preview features
("labs", {"labs"}, STAGE_0_SUBSTAGE_TIMEOUT),
# Setup frontend
("frontend", FRONTEND_INTEGRATIONS, None),
# Setup recorder

@@ -214,7 +212,6 @@ DEFAULT_INTEGRATIONS = {
"backup",
"frontend",
"hardware",
"labs",
"logger",
"network",
"system_health",
@@ -1,5 +0,0 @@
{
"domain": "victron",
"name": "Victron",
"integrations": ["victron_ble", "victron_remote_monitoring"]
}
@@ -1,10 +1,10 @@
"""The Actron Air integration."""

from actron_neo_api import (
ActronAirACSystem,
ActronAirAPI,
ActronAirAPIError,
ActronAirAuthError,
ActronAirNeoACSystem,
ActronNeoAPI,
ActronNeoAPIError,
ActronNeoAuthError,
)

from homeassistant.const import CONF_API_TOKEN, Platform

@@ -23,16 +23,16 @@ PLATFORM = [Platform.CLIMATE]
async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:
"""Set up Actron Air integration from a config entry."""

api = ActronAirAPI(refresh_token=entry.data[CONF_API_TOKEN])
systems: list[ActronAirACSystem] = []
api = ActronNeoAPI(refresh_token=entry.data[CONF_API_TOKEN])
systems: list[ActronAirNeoACSystem] = []

try:
systems = await api.get_ac_systems()
await api.update_status()
except ActronAirAuthError:
except ActronNeoAuthError:
_LOGGER.error("Authentication error while setting up Actron Air integration")
raise
except ActronAirAPIError as err:
except ActronNeoAPIError as err:
_LOGGER.error("API error while setting up Actron Air integration: %s", err)
raise
@@ -2,7 +2,7 @@

from typing import Any

from actron_neo_api import ActronAirStatus, ActronAirZone
from actron_neo_api import ActronAirNeoStatus, ActronAirNeoZone

from homeassistant.components.climate import (
FAN_AUTO,

@@ -132,7 +132,7 @@ class ActronSystemClimate(BaseClimateEntity):
return self._status.max_temp

@property
def _status(self) -> ActronAirStatus:
def _status(self) -> ActronAirNeoStatus:
"""Get the current status from the coordinator."""
return self.coordinator.data

@@ -194,7 +194,7 @@ class ActronZoneClimate(BaseClimateEntity):
def __init__(
self,
coordinator: ActronAirSystemCoordinator,
zone: ActronAirZone,
zone: ActronAirNeoZone,
) -> None:
"""Initialize an Actron Air unit."""
super().__init__(coordinator, zone.title)

@@ -221,7 +221,7 @@ class ActronZoneClimate(BaseClimateEntity):
return self._zone.max_temp

@property
def _zone(self) -> ActronAirZone:
def _zone(self) -> ActronAirNeoZone:
"""Get the current zone data from the coordinator."""
status = self.coordinator.data
return status.zones[self._zone_id]
@@ -3,7 +3,7 @@
import asyncio
from typing import Any

from actron_neo_api import ActronAirAPI, ActronAirAuthError
from actron_neo_api import ActronNeoAPI, ActronNeoAuthError

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_TOKEN

@@ -17,7 +17,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):

def __init__(self) -> None:
"""Initialize the config flow."""
self._api: ActronAirAPI | None = None
self._api: ActronNeoAPI | None = None
self._device_code: str | None = None
self._user_code: str = ""
self._verification_uri: str = ""

@@ -30,10 +30,10 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle the initial step."""
if self._api is None:
_LOGGER.debug("Initiating device authorization")
self._api = ActronAirAPI()
self._api = ActronNeoAPI()
try:
device_code_response = await self._api.request_device_code()
except ActronAirAuthError as err:
except ActronNeoAuthError as err:
_LOGGER.error("OAuth2 flow failed: %s", err)
return self.async_abort(reason="oauth2_error")

@@ -50,7 +50,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
try:
await self._api.poll_for_token(self._device_code)
_LOGGER.debug("Authorization successful")
except ActronAirAuthError as ex:
except ActronNeoAuthError as ex:
_LOGGER.exception("Error while waiting for device authorization")
raise CannotConnect from ex

@@ -89,7 +89,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):

try:
user_data = await self._api.get_user_info()
except ActronAirAuthError as err:
except ActronNeoAuthError as err:
_LOGGER.error("Error getting user info: %s", err)
return self.async_abort(reason="oauth2_error")
@@ -5,7 +5,7 @@ from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta

from actron_neo_api import ActronAirACSystem, ActronAirAPI, ActronAirStatus
from actron_neo_api import ActronAirNeoACSystem, ActronAirNeoStatus, ActronNeoAPI

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

@@ -23,7 +23,7 @@ ERROR_UNKNOWN = "unknown_error"
class ActronAirRuntimeData:
"""Runtime data for the Actron Air integration."""

api: ActronAirAPI
api: ActronNeoAPI
system_coordinators: dict[str, ActronAirSystemCoordinator]

@@ -33,15 +33,15 @@ AUTH_ERROR_THRESHOLD = 3
SCAN_INTERVAL = timedelta(seconds=30)

class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirNeoACSystem]):
"""System coordinator for Actron Air integration."""

def __init__(
self,
hass: HomeAssistant,
entry: ActronAirConfigEntry,
api: ActronAirAPI,
system: ActronAirACSystem,
api: ActronNeoAPI,
system: ActronAirNeoACSystem,
) -> None:
"""Initialize the coordinator."""
super().__init__(

@@ -57,7 +57,7 @@ class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirACSystem]):
self.status = self.api.state_manager.get_status(self.serial_number)
self.last_seen = dt_util.utcnow()

async def _async_update_data(self) -> ActronAirStatus:
async def _async_update_data(self) -> ActronAirNeoStatus:
"""Fetch updates and merge incremental changes into the full state."""
await self.api.update_status()
self.status = self.api.state_manager.get_status(self.serial_number)
@@ -12,5 +12,5 @@
"documentation": "https://www.home-assistant.io/integrations/actron_air",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["actron-neo-api==0.1.87"]
"requirements": ["actron-neo-api==0.1.84"]
}
@@ -45,7 +45,7 @@ SERVICE_REFRESH_SCHEMA = vol.Schema(
{vol.Optional(CONF_FORCE, default=False): cv.boolean}
)

PLATFORMS = [Platform.SENSOR, Platform.SWITCH, Platform.UPDATE]
PLATFORMS = [Platform.SENSOR, Platform.SWITCH]
type AdGuardConfigEntry = ConfigEntry[AdGuardData]
@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"loggers": ["adguardhome"],
"requirements": ["adguardhome==0.8.1"]
"requirements": ["adguardhome==0.8.0"]
}
@@ -1,71 +0,0 @@
|
||||
"""AdGuard Home Update platform."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
|
||||
from adguardhome import AdGuardHomeError
|
||||
|
||||
from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AdGuardConfigEntry, AdGuardData
|
||||
from .const import DOMAIN
|
||||
from .entity import AdGuardHomeEntity
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=300)
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AdGuardConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up AdGuard Home update entity based on a config entry."""
|
||||
data = entry.runtime_data
|
||||
|
||||
if (await data.client.update.update_available()).disabled:
|
||||
return
|
||||
|
||||
async_add_entities([AdGuardHomeUpdate(data, entry)], True)
|
||||
|
||||
|
||||
class AdGuardHomeUpdate(AdGuardHomeEntity, UpdateEntity):
|
||||
"""Defines an AdGuard Home update."""
|
||||
|
||||
_attr_supported_features = UpdateEntityFeature.INSTALL
|
||||
_attr_name = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
data: AdGuardData,
|
||||
entry: AdGuardConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize AdGuard Home update."""
|
||||
super().__init__(data, entry)
|
||||
|
||||
self._attr_unique_id = "_".join(
|
||||
[DOMAIN, self.adguard.host, str(self.adguard.port), "update"]
|
||||
)
|
||||
|
||||
async def _adguard_update(self) -> None:
|
||||
"""Update AdGuard Home entity."""
|
||||
value = await self.adguard.update.update_available()
|
||||
self._attr_installed_version = self.data.version
|
||||
self._attr_latest_version = value.new_version
|
||||
self._attr_release_summary = value.announcement
|
||||
self._attr_release_url = value.announcement_url
|
||||
|
||||
async def async_install(
|
||||
self, version: str | None, backup: bool, **kwargs: Any
|
||||
) -> None:
|
||||
"""Install latest update."""
|
||||
try:
|
||||
await self.adguard.update.begin_update()
|
||||
except AdGuardHomeError as err:
|
||||
raise HomeAssistantError(f"Failed to install update: {err}") from err
|
||||
self.hass.config_entries.async_schedule_reload(self._entry.entry_id)
|
||||
@@ -1,29 +0,0 @@
|
||||
"""The Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.CLIMATE]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
|
||||
"""Set up Airobot from a config entry."""
|
||||
coordinator = AirobotDataUpdateCoordinator(hass, entry)
|
||||
|
||||
# Fetch initial data so we have data when entities subscribe
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
@@ -1,151 +0,0 @@
|
||||
"""Climate platform for Airobot thermostat."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyairobotrest.const import (
|
||||
MODE_AWAY,
|
||||
MODE_HOME,
|
||||
SETPOINT_TEMP_MAX,
|
||||
SETPOINT_TEMP_MIN,
|
||||
)
|
||||
from pyairobotrest.exceptions import AirobotError
|
||||
from pyairobotrest.models import ThermostatSettings, ThermostatStatus
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
PRESET_AWAY,
|
||||
PRESET_BOOST,
|
||||
PRESET_HOME,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AirobotConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .entity import AirobotEntity
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
_PRESET_MODE_2_MODE = {
|
||||
PRESET_AWAY: MODE_AWAY,
|
||||
PRESET_HOME: MODE_HOME,
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AirobotConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Airobot climate platform."""
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities([AirobotClimate(coordinator)])
|
||||
|
||||
|
||||
class AirobotClimate(AirobotEntity, ClimateEntity):
|
||||
"""Representation of an Airobot thermostat."""
|
||||
|
||||
_attr_name = None
|
||||
_attr_translation_key = "thermostat"
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
_attr_hvac_modes = [HVACMode.HEAT]
|
||||
_attr_preset_modes = [PRESET_HOME, PRESET_AWAY, PRESET_BOOST]
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE
|
||||
)
|
||||
_attr_min_temp = SETPOINT_TEMP_MIN
|
||||
_attr_max_temp = SETPOINT_TEMP_MAX
|
||||
|
||||
@property
|
||||
def _status(self) -> ThermostatStatus:
|
||||
"""Get status from coordinator data."""
|
||||
return self.coordinator.data.status
|
||||
|
||||
@property
|
||||
def _settings(self) -> ThermostatSettings:
|
||||
"""Get settings from coordinator data."""
|
||||
return self.coordinator.data.settings
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
return self._status.temp_air
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the target temperature."""
|
||||
if self._settings.is_home_mode:
|
||||
return self._settings.setpoint_temp
|
||||
return self._settings.setpoint_temp_away
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return current HVAC mode."""
|
||||
if self._status.is_heating:
|
||||
return HVACMode.HEAT
|
||||
return HVACMode.OFF
|
||||
|
||||
@property
|
||||
def hvac_action(self) -> HVACAction:
|
||||
"""Return current HVAC action."""
|
||||
if self._status.is_heating:
|
||||
return HVACAction.HEATING
|
||||
return HVACAction.IDLE
|
||||
|
||||
@property
|
||||
def preset_mode(self) -> str | None:
|
||||
"""Return current preset mode."""
|
||||
if self._settings.setting_flags.boost_enabled:
|
||||
return PRESET_BOOST
|
||||
if self._settings.is_home_mode:
|
||||
return PRESET_HOME
|
||||
return PRESET_AWAY
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
temperature = kwargs[ATTR_TEMPERATURE]
|
||||
|
||||
try:
|
||||
if self._settings.is_home_mode:
|
||||
await self.coordinator.client.set_home_temperature(float(temperature))
|
||||
else:
|
||||
await self.coordinator.client.set_away_temperature(float(temperature))
|
||||
except AirobotError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_temperature_failed",
|
||||
translation_placeholders={"temperature": str(temperature)},
|
||||
) from err
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
"""Set new preset mode."""
|
||||
try:
|
||||
if preset_mode == PRESET_BOOST:
|
||||
# Enable boost mode
|
||||
if not self._settings.setting_flags.boost_enabled:
|
||||
await self.coordinator.client.set_boost_mode(True)
|
||||
else:
|
||||
# Disable boost mode if it's enabled
|
||||
if self._settings.setting_flags.boost_enabled:
|
||||
await self.coordinator.client.set_boost_mode(False)
|
||||
|
||||
# Set the mode (HOME or AWAY)
|
||||
await self.coordinator.client.set_mode(_PRESET_MODE_2_MODE[preset_mode])
|
||||
|
||||
except AirobotError as err:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="set_preset_mode_failed",
|
||||
translation_placeholders={"preset_mode": preset_mode},
|
||||
) from err
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
@@ -1,183 +0,0 @@
|
||||
"""Config flow for the Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyairobotrest import AirobotClient
|
||||
from pyairobotrest.exceptions import (
|
||||
AirobotAuthError,
|
||||
AirobotConnectionError,
|
||||
AirobotError,
|
||||
AirobotTimeoutError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow as BaseConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_USERNAME): str,
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class DeviceInfo:
|
||||
"""Device information."""
|
||||
|
||||
title: str
|
||||
device_id: str
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> DeviceInfo:
|
||||
"""Validate the user input allows us to connect.
|
||||
|
||||
Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
|
||||
"""
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
client = AirobotClient(
|
||||
host=data[CONF_HOST],
|
||||
username=data[CONF_USERNAME],
|
||||
password=data[CONF_PASSWORD],
|
||||
session=session,
|
||||
)
|
||||
|
||||
try:
|
||||
# Try to fetch data to validate connection and authentication
|
||||
status = await client.get_statuses()
|
||||
settings = await client.get_settings()
|
||||
except AirobotAuthError as err:
|
||||
raise InvalidAuth from err
|
||||
except (AirobotConnectionError, AirobotTimeoutError, AirobotError) as err:
|
||||
raise CannotConnect from err
|
||||
|
||||
# Use device name or device ID as title
|
||||
title = settings.device_name or status.device_id
|
||||
|
||||
return DeviceInfo(title=title, device_id=status.device_id)
|
||||
|
||||
|
||||
class AirobotConfigFlow(BaseConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Airobot."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self._discovered_host: str | None = None
|
||||
self._discovered_mac: str | None = None
|
||||
self._discovered_device_id: str | None = None
|
||||
|
||||
async def async_step_dhcp(
|
||||
self, discovery_info: DhcpServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle DHCP discovery."""
|
||||
# Store the discovered IP address and MAC
|
||||
self._discovered_host = discovery_info.ip
|
||||
self._discovered_mac = discovery_info.macaddress
|
||||
|
||||
# Extract device_id from hostname (format: airobot-thermostat-t01xxxxxx)
|
||||
hostname = discovery_info.hostname.lower()
|
||||
device_id = hostname.replace("airobot-thermostat-", "").upper()
|
||||
self._discovered_device_id = device_id
|
||||
# Set unique_id to device_id for duplicate detection
|
||||
await self.async_set_unique_id(device_id)
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.ip})
|
||||
|
||||
# Show the confirmation form
|
||||
return await self.async_step_dhcp_confirm()
|
||||
|
||||
async def async_step_dhcp_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle DHCP discovery confirmation - ask for credentials only."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
# Combine discovered host and device_id with user-provided password
|
||||
data = {
|
||||
CONF_HOST: self._discovered_host,
|
||||
CONF_USERNAME: self._discovered_device_id,
|
||||
CONF_PASSWORD: user_input[CONF_PASSWORD],
|
||||
}
|
||||
|
||||
try:
|
||||
info = await validate_input(self.hass, data)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
# Store MAC address in config entry data
|
||||
if self._discovered_mac:
|
||||
data[CONF_MAC] = self._discovered_mac
|
||||
|
||||
return self.async_create_entry(title=info.title, data=data)
|
||||
|
||||
# Only ask for password since we already have the device_id from discovery
|
||||
return self.async_show_form(
|
||||
step_id="dhcp_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
),
|
||||
description_placeholders={
|
||||
"host": self._discovered_host or "",
|
||||
"device_id": self._discovered_device_id or "",
|
||||
},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
try:
|
||||
info = await validate_input(self.hass, user_input)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
# Use device ID as unique ID to prevent duplicates
|
||||
await self.async_set_unique_id(info.device_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(title=info.title, data=user_input)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
||||
|
||||
class InvalidAuth(HomeAssistantError):
|
||||
"""Error to indicate there is invalid auth."""
|
||||
@@ -1,5 +0,0 @@
|
||||
"""Constants for the Airobot integration."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
DOMAIN: Final = "airobot"
|
||||
@@ -1,59 +0,0 @@
|
||||
"""Coordinator for the Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pyairobotrest import AirobotClient
|
||||
from pyairobotrest.exceptions import AirobotAuthError, AirobotConnectionError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
from .models import AirobotData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Update interval - thermostat measures air every 30 seconds
|
||||
UPDATE_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
type AirobotConfigEntry = ConfigEntry[AirobotDataUpdateCoordinator]
|
||||
|
||||
|
||||
class AirobotDataUpdateCoordinator(DataUpdateCoordinator[AirobotData]):
|
||||
"""Class to manage fetching Airobot data."""
|
||||
|
||||
config_entry: AirobotConfigEntry
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: AirobotConfigEntry) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name=DOMAIN,
|
||||
update_interval=UPDATE_INTERVAL,
|
||||
config_entry=entry,
|
||||
)
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
self.client = AirobotClient(
|
||||
host=entry.data[CONF_HOST],
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
session=session,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> AirobotData:
|
||||
"""Fetch data from API endpoint."""
|
||||
try:
|
||||
status = await self.client.get_statuses()
|
||||
settings = await self.client.get_settings()
|
||||
except (AirobotAuthError, AirobotConnectionError) as err:
|
||||
raise UpdateFailed(f"Failed to communicate with device: {err}") from err
|
||||
|
||||
return AirobotData(status=status, settings=settings)
|
||||
@@ -1,42 +0,0 @@
|
||||
"""Base entity for Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.const import CONF_MAC
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AirobotDataUpdateCoordinator
|
||||
|
||||
|
||||
class AirobotEntity(CoordinatorEntity[AirobotDataUpdateCoordinator]):
|
||||
"""Base class for Airobot entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AirobotDataUpdateCoordinator,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
status = coordinator.data.status
|
||||
settings = coordinator.data.settings
|
||||
|
||||
self._attr_unique_id = status.device_id
|
||||
|
||||
connections = set()
|
||||
if (mac := coordinator.config_entry.data.get(CONF_MAC)) is not None:
|
||||
connections.add((CONNECTION_NETWORK_MAC, mac))
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, status.device_id)},
|
||||
connections=connections,
|
||||
name=settings.device_name or status.device_id,
|
||||
manufacturer="Airobot",
|
||||
model="Thermostat",
|
||||
model_id="TE1",
|
||||
sw_version=str(status.fw_version),
|
||||
hw_version=str(status.hw_version),
|
||||
)
|
||||
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"domain": "airobot",
|
||||
"name": "Airobot",
|
||||
"codeowners": ["@mettolen"],
|
||||
"config_flow": true,
|
||||
"dhcp": [
|
||||
{
|
||||
"hostname": "airobot-thermostat-*"
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/airobot",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyairobotrest"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyairobotrest==0.1.0"]
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
"""Models for the Airobot integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from pyairobotrest.models import ThermostatSettings, ThermostatStatus
|
||||
|
||||
|
||||
@dataclass
|
||||
class AirobotData:
|
||||
"""Data from the Airobot coordinator."""
|
||||
|
||||
status: ThermostatStatus
|
||||
settings: ThermostatSettings
|
||||
@@ -1,70 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: Integration does not register custom actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: Integration does not use event subscriptions.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: Single device integration, no dynamic device discovery needed.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: todo
|
||||
exception-translations: done
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: Single device integration, no stale device handling needed.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
@@ -1,44 +0,0 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"dhcp_confirm": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "The thermostat password."
|
||||
},
|
||||
"description": "Airobot thermostat {device_id} discovered at {host}. Enter the password to complete setup. Find the password in the thermostat settings menu under Connectivity → Mobile app."
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your Airobot thermostat.",
|
||||
"password": "The thermostat password.",
|
||||
"username": "The thermostat Device ID (e.g., T01XXXXXX)."
|
||||
},
|
||||
"description": "Enter your Airobot thermostat connection details. Find the Device ID and password in the thermostat settings menu under Connectivity → Mobile app."
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"set_preset_mode_failed": {
|
||||
"message": "Failed to set preset mode to {preset_mode}."
|
||||
},
|
||||
"set_temperature_failed": {
|
||||
"message": "Failed to set temperature to {temperature}."
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -45,7 +45,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
data[CONF_PASSWORD],
)

return await api.login.login_mode_interactive(data[CONF_CODE])
return await api.login_mode_interactive(data[CONF_CODE])


class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
@@ -59,7 +59,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
async def _async_update_data(self) -> dict[str, AmazonDevice]:
"""Update device data."""
try:
await self.api.login.login_mode_stored_data()
await self.api.login_mode_stored_data()
data = await self.api.get_devices_data()
except CannotConnect as err:
raise UpdateFailed(
@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==9.0.2"]
"requirements": ["aioamazondevices==8.0.1"]
}
@@ -6,7 +6,9 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.core import Event, HassJob, HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.event import async_call_later, async_track_time_interval
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
@@ -18,7 +20,7 @@ from .analytics import (
|
||||
EntityAnalyticsModifications,
|
||||
async_devices_payload,
|
||||
)
|
||||
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, PREFERENCE_SCHEMA
|
||||
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA
|
||||
from .http import AnalyticsDevicesView
|
||||
|
||||
__all__ = [
|
||||
@@ -29,43 +31,40 @@ __all__ = [
|
||||
"async_devices_payload",
|
||||
]
|
||||
|
||||
CONF_SNAPSHOTS_URL = "snapshots_url"
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_SNAPSHOTS_URL): vol.Any(str, None),
|
||||
}
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
|
||||
DATA_COMPONENT: HassKey[Analytics] = HassKey(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
|
||||
"""Set up the analytics integration."""
|
||||
analytics_config = config.get(DOMAIN, {})
|
||||
|
||||
# For now we want to enable device analytics only if the url option
|
||||
# is explicitly listed in YAML.
|
||||
if CONF_SNAPSHOTS_URL in analytics_config:
|
||||
disable_snapshots = False
|
||||
snapshots_url = analytics_config[CONF_SNAPSHOTS_URL]
|
||||
else:
|
||||
disable_snapshots = True
|
||||
snapshots_url = None
|
||||
|
||||
analytics = Analytics(hass, snapshots_url, disable_snapshots)
|
||||
analytics = Analytics(hass)
|
||||
|
||||
# Load stored data
|
||||
await analytics.load()
|
||||
|
||||
async def start_schedule(_event: Event) -> None:
|
||||
@callback
|
||||
def start_schedule(_event: Event) -> None:
|
||||
"""Start the send schedule after the started event."""
|
||||
await analytics.async_schedule()
|
||||
# Wait 15 min after started
|
||||
async_call_later(
|
||||
hass,
|
||||
900,
|
||||
HassJob(
|
||||
analytics.send_analytics,
|
||||
name="analytics schedule",
|
||||
cancel_on_shutdown=True,
|
||||
),
|
||||
)
|
||||
|
||||
# Send every day
|
||||
async_track_time_interval(
|
||||
hass,
|
||||
analytics.send_analytics,
|
||||
INTERVAL,
|
||||
name="analytics daily",
|
||||
cancel_on_shutdown=True,
|
||||
)
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)
|
||||
|
||||
@@ -112,7 +111,7 @@ async def websocket_analytics_preferences(
|
||||
analytics = hass.data[DATA_COMPONENT]
|
||||
|
||||
await analytics.save_preferences(preferences)
|
||||
await analytics.async_schedule()
|
||||
await analytics.send_analytics()
|
||||
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
|
||||
@@ -7,8 +7,6 @@ from asyncio import timeout
|
||||
from collections.abc import Awaitable, Callable, Iterable, Mapping
|
||||
from dataclasses import asdict as dataclass_asdict, dataclass, field
|
||||
from datetime import datetime
|
||||
import random
|
||||
import time
|
||||
from typing import Any, Protocol
|
||||
import uuid
|
||||
|
||||
@@ -33,18 +31,10 @@ from homeassistant.const import (
|
||||
BASE_PLATFORMS,
|
||||
__version__ as HA_VERSION,
|
||||
)
|
||||
from homeassistant.core import (
|
||||
CALLBACK_TYPE,
|
||||
HassJob,
|
||||
HomeAssistant,
|
||||
ReleaseChannel,
|
||||
callback,
|
||||
get_release_channel,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.event import async_call_later, async_track_time_interval
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
from homeassistant.helpers.singleton import singleton
|
||||
from homeassistant.helpers.storage import Store
|
||||
@@ -59,6 +49,8 @@ from homeassistant.loader import (
|
||||
from homeassistant.setup import async_get_loaded_integrations
|
||||
|
||||
from .const import (
|
||||
ANALYTICS_ENDPOINT_URL,
|
||||
ANALYTICS_ENDPOINT_URL_DEV,
|
||||
ATTR_ADDON_COUNT,
|
||||
ATTR_ADDONS,
|
||||
ATTR_ARCH,
|
||||
@@ -79,7 +71,6 @@ from .const import (
|
||||
ATTR_PROTECTED,
|
||||
ATTR_RECORDER,
|
||||
ATTR_SLUG,
|
||||
ATTR_SNAPSHOTS,
|
||||
ATTR_STATE_COUNT,
|
||||
ATTR_STATISTICS,
|
||||
ATTR_SUPERVISOR,
|
||||
@@ -88,15 +79,9 @@ from .const import (
|
||||
ATTR_USER_COUNT,
|
||||
ATTR_UUID,
|
||||
ATTR_VERSION,
|
||||
BASIC_ENDPOINT_URL,
|
||||
BASIC_ENDPOINT_URL_DEV,
|
||||
DOMAIN,
|
||||
INTERVAL,
|
||||
LOGGER,
|
||||
PREFERENCE_SCHEMA,
|
||||
SNAPSHOT_DEFAULT_URL,
|
||||
SNAPSHOT_URL_PATH,
|
||||
SNAPSHOT_VERSION,
|
||||
STORAGE_KEY,
|
||||
STORAGE_VERSION,
|
||||
)
|
||||
@@ -209,18 +194,13 @@ def gen_uuid() -> str:
|
||||
return uuid.uuid4().hex
|
||||
|
||||
|
||||
RELEASE_CHANNEL = get_release_channel()
|
||||
|
||||
|
||||
@dataclass
|
||||
class AnalyticsData:
|
||||
"""Analytics data."""
|
||||
|
||||
onboarded: bool
|
||||
preferences: dict[str, bool]
|
||||
uuid: str | None = None
|
||||
submission_identifier: str | None = None
|
||||
snapshot_submission_time: float | None = None
|
||||
uuid: str | None
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data: dict[str, Any]) -> AnalyticsData:
|
||||
@@ -229,44 +209,29 @@ class AnalyticsData:
|
||||
data["onboarded"],
|
||||
data["preferences"],
|
||||
data["uuid"],
|
||||
data.get("submission_identifier"),
|
||||
data.get("snapshot_submission_time"),
|
||||
)
|
||||
|
||||
|
||||
class Analytics:
|
||||
"""Analytics helper class for the analytics integration."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
snapshots_url: str | None = None,
|
||||
disable_snapshots: bool = False,
|
||||
) -> None:
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the Analytics class."""
|
||||
self._hass: HomeAssistant = hass
|
||||
self._snapshots_url = snapshots_url
|
||||
self._disable_snapshots = disable_snapshots
|
||||
|
||||
self._session = async_get_clientsession(hass)
|
||||
self._data = AnalyticsData(False, {})
|
||||
self.hass: HomeAssistant = hass
|
||||
self.session = async_get_clientsession(hass)
|
||||
self._data = AnalyticsData(False, {}, None)
|
||||
self._store = Store[dict[str, Any]](hass, STORAGE_VERSION, STORAGE_KEY)
|
||||
self._basic_scheduled: CALLBACK_TYPE | None = None
|
||||
self._snapshot_scheduled: CALLBACK_TYPE | None = None
|
||||
|
||||
@property
|
||||
def preferences(self) -> dict:
|
||||
"""Return the current active preferences."""
|
||||
preferences = self._data.preferences
|
||||
result = {
|
||||
return {
|
||||
ATTR_BASE: preferences.get(ATTR_BASE, False),
|
||||
ATTR_DIAGNOSTICS: preferences.get(ATTR_DIAGNOSTICS, False),
|
||||
ATTR_USAGE: preferences.get(ATTR_USAGE, False),
|
||||
ATTR_STATISTICS: preferences.get(ATTR_STATISTICS, False),
|
||||
}
|
||||
if not self._disable_snapshots:
|
||||
result[ATTR_SNAPSHOTS] = preferences.get(ATTR_SNAPSHOTS, False)
|
||||
return result
|
||||
|
||||
@property
|
||||
def onboarded(self) -> bool:
|
||||
@@ -279,17 +244,17 @@ class Analytics:
|
||||
return self._data.uuid
|
||||
|
||||
@property
|
||||
def endpoint_basic(self) -> str:
|
||||
def endpoint(self) -> str:
|
||||
"""Return the endpoint that will receive the payload."""
|
||||
if RELEASE_CHANNEL is ReleaseChannel.DEV:
|
||||
if HA_VERSION.endswith("0.dev0"):
|
||||
# dev installations will contact the dev analytics environment
|
||||
return BASIC_ENDPOINT_URL_DEV
|
||||
return BASIC_ENDPOINT_URL
|
||||
return ANALYTICS_ENDPOINT_URL_DEV
|
||||
return ANALYTICS_ENDPOINT_URL
|
||||
|
||||
@property
|
||||
def supervisor(self) -> bool:
|
||||
"""Return bool if a supervisor is present."""
|
||||
return is_hassio(self._hass)
|
||||
return is_hassio(self.hass)
|
||||
|
||||
async def load(self) -> None:
|
||||
"""Load preferences."""
|
||||
@@ -299,7 +264,7 @@ class Analytics:
|
||||
|
||||
if (
|
||||
self.supervisor
|
||||
and (supervisor_info := hassio.get_supervisor_info(self._hass)) is not None
|
||||
and (supervisor_info := hassio.get_supervisor_info(self.hass)) is not None
|
||||
):
|
||||
if not self.onboarded:
|
||||
# User have not configured analytics, get this setting from the supervisor
|
||||
@@ -312,35 +277,32 @@ class Analytics:
|
||||
):
|
||||
self._data.preferences[ATTR_DIAGNOSTICS] = False
|
||||
|
||||
async def _save(self) -> None:
|
||||
"""Save data."""
|
||||
await self._store.async_save(dataclass_asdict(self._data))
|
||||
|
||||
async def save_preferences(self, preferences: dict) -> None:
|
||||
"""Save preferences."""
|
||||
preferences = PREFERENCE_SCHEMA(preferences)
|
||||
self._data.preferences.update(preferences)
|
||||
self._data.onboarded = True
|
||||
|
||||
await self._save()
|
||||
await self._store.async_save(dataclass_asdict(self._data))
|
||||
|
||||
if self.supervisor:
|
||||
await hassio.async_update_diagnostics(
|
||||
self._hass, self.preferences.get(ATTR_DIAGNOSTICS, False)
|
||||
self.hass, self.preferences.get(ATTR_DIAGNOSTICS, False)
|
||||
)
|
||||
|
||||
    async def send_analytics(self, _: datetime | None = None) -> None:
        """Send analytics."""
        if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
            return

        hass = self._hass
        hass = self.hass
        supervisor_info = None
        operating_system_info: dict[str, Any] = {}

        if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
            LOGGER.debug("Nothing to submit")
            return

        if self._data.uuid is None:
            self._data.uuid = gen_uuid()
            await self._save()
            await self._store.async_save(dataclass_asdict(self._data))

        if self.supervisor:
            supervisor_info = hassio.get_supervisor_info(hass)
@@ -474,7 +436,7 @@ class Analytics:

        try:
            async with timeout(30):
                response = await self._session.post(self.endpoint_basic, json=payload)
                response = await self.session.post(self.endpoint, json=payload)
            if response.status == 200:
                LOGGER.info(
                    (
@@ -487,12 +449,14 @@ class Analytics:
                LOGGER.warning(
                    "Sending analytics failed with statuscode %s from %s",
                    response.status,
                    self.endpoint_basic,
                    self.endpoint,
                )
        except TimeoutError:
            LOGGER.error("Timeout sending analytics to %s", BASIC_ENDPOINT_URL)
            LOGGER.error("Timeout sending analytics to %s", ANALYTICS_ENDPOINT_URL)
        except aiohttp.ClientError as err:
            LOGGER.error("Error sending analytics to %s: %r", BASIC_ENDPOINT_URL, err)
            LOGGER.error(
                "Error sending analytics to %s: %r", ANALYTICS_ENDPOINT_URL, err
            )

    @callback
    def _async_should_report_integration(
@@ -516,7 +480,7 @@ class Analytics:
        if not integration.config_flow:
            return False

        entries = self._hass.config_entries.async_entries(integration.domain)
        entries = self.hass.config_entries.async_entries(integration.domain)

        # Filter out ignored and disabled entries
        return any(
@@ -525,186 +489,6 @@ class Analytics:
            if entry.source != SOURCE_IGNORE and entry.disabled_by is None
        )

    async def send_snapshot(self, _: datetime | None = None) -> None:
        """Send a snapshot."""
        if not self.onboarded or not self.preferences.get(ATTR_SNAPSHOTS, False):
            return

        payload = await _async_snapshot_payload(self._hass)

        headers = {
            "Content-Type": "application/json",
            "User-Agent": f"home-assistant/{HA_VERSION}",
        }
        if self._data.submission_identifier is not None:
            headers["X-Device-Database-Submission-Identifier"] = (
                self._data.submission_identifier
            )

        url = (
            self._snapshots_url
            if self._snapshots_url is not None
            else SNAPSHOT_DEFAULT_URL
        )
        url += SNAPSHOT_URL_PATH

        try:
            async with timeout(30):
                response = await self._session.post(url, json=payload, headers=headers)

            if response.status == 200:  # OK
                response_data = await response.json()
                new_identifier = response_data.get("submission_identifier")

                if (
                    new_identifier is not None
                    and new_identifier != self._data.submission_identifier
                ):
                    self._data.submission_identifier = new_identifier
                    await self._save()

                LOGGER.info(
                    "Submitted snapshot analytics to Home Assistant servers"
                )

            elif response.status == 400:  # Bad Request
                response_data = await response.json()
                error_kind = response_data.get("kind", "unknown")
                error_message = response_data.get("message", "Unknown error")

                if error_kind == "invalid-submission-identifier":
                    # Clear the invalid identifier and retry on next cycle
                    LOGGER.warning(
                        "Invalid submission identifier to %s, clearing: %s",
                        url,
                        error_message,
                    )
                    self._data.submission_identifier = None
                    await self._save()
                else:
                    LOGGER.warning(
                        "Malformed snapshot analytics submission (%s) to %s: %s",
                        error_kind,
                        url,
                        error_message,
                    )

            elif response.status == 503:  # Service Unavailable
                response_text = await response.text()
                LOGGER.warning(
                    "Snapshot analytics service %s unavailable: %s",
                    url,
                    response_text,
                )

            else:
                LOGGER.warning(
                    "Unexpected status code %s when submitting snapshot analytics to %s",
                    response.status,
                    url,
                )

        except TimeoutError:
            LOGGER.error(
                "Timeout sending snapshot analytics to %s",
                url,
            )
        except aiohttp.ClientError as err:
            LOGGER.error(
                "Error sending snapshot analytics to %s: %r",
                url,
                err,
            )

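The snapshot upload above is effectively a small client-side protocol built around the `X-Device-Database-Submission-Identifier` header. A condensed sketch of that handshake, with a hypothetical `post()` helper standing in for the aiohttp call:

```python
async def submit_snapshot(post, payload, stored_identifier):
    """Sketch of the identifier handshake; `post` is a hypothetical awaitable HTTP helper."""
    headers = {}
    if stored_identifier is not None:
        # Re-use the identifier the server issued on a previous upload.
        headers["X-Device-Database-Submission-Identifier"] = stored_identifier

    response = await post(payload, headers)

    if response.status == 200:
        body = await response.json()
        # Persist any new identifier so the next snapshot updates the same record.
        return body.get("submission_identifier", stored_identifier)
    if response.status == 400:
        body = await response.json()
        if body.get("kind") == "invalid-submission-identifier":
            # The server no longer recognises the identifier: drop it and retry next cycle.
            return None
    return stored_identifier
```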
    async def async_schedule(self) -> None:
        """Schedule analytics."""
        if not self.onboarded:
            LOGGER.debug("Analytics not scheduled")
            if self._basic_scheduled is not None:
                self._basic_scheduled()
                self._basic_scheduled = None
            if self._snapshot_scheduled:
                self._snapshot_scheduled()
                self._snapshot_scheduled = None
            return

        if not self.preferences.get(ATTR_BASE, False):
            LOGGER.debug("Basic analytics not scheduled")
            if self._basic_scheduled is not None:
                self._basic_scheduled()
                self._basic_scheduled = None
        elif self._basic_scheduled is None:
            # Wait 15 min after started for basic analytics
            self._basic_scheduled = async_call_later(
                self._hass,
                900,
                HassJob(
                    self._async_schedule_basic,
                    name="basic analytics schedule",
                    cancel_on_shutdown=True,
                ),
            )

        if not self.preferences.get(ATTR_SNAPSHOTS, False) or self._disable_snapshots:
            LOGGER.debug("Snapshot analytics not scheduled")
            if self._snapshot_scheduled:
                self._snapshot_scheduled()
                self._snapshot_scheduled = None
        elif self._snapshot_scheduled is None:
            snapshot_submission_time = self._data.snapshot_submission_time

            interval_seconds = INTERVAL.total_seconds()

            if snapshot_submission_time is None:
                # Randomize the submission time within the 24 hours
                snapshot_submission_time = random.uniform(0, interval_seconds)
                self._data.snapshot_submission_time = snapshot_submission_time
                await self._save()
                LOGGER.debug(
                    "Initialized snapshot submission time to %s",
                    snapshot_submission_time,
                )

            # Calculate delay until next submission
            current_time = time.time()
            delay = (snapshot_submission_time - current_time) % interval_seconds

            self._snapshot_scheduled = async_call_later(
                self._hass,
                delay,
                HassJob(
                    self._async_schedule_snapshots,
                    name="snapshot analytics schedule",
                    cancel_on_shutdown=True,
                ),
            )

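The delay calculation above folds the randomized submission time onto the daily cycle with a modulo, so the timer always lands at the same point within each 24-hour window. A small worked example with illustrative numbers:

```python
INTERVAL_SECONDS = 24 * 60 * 60          # one day, matching INTERVAL above

submission_time = 30_000.0               # randomized offset stored on first run
now = 1_700_000_000.0                    # current epoch seconds (example value)

# The modulo maps the difference into [0, INTERVAL_SECONDS).
delay = (submission_time - now) % INTERVAL_SECONDS
assert 0 <= delay < INTERVAL_SECONDS
```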
    async def _async_schedule_basic(self, _: datetime | None = None) -> None:
        """Schedule basic analytics."""
        await self.send_analytics()

        # Send basic analytics every day
        self._basic_scheduled = async_track_time_interval(
            self._hass,
            self.send_analytics,
            INTERVAL,
            name="basic analytics daily",
            cancel_on_shutdown=True,
        )

    async def _async_schedule_snapshots(self, _: datetime | None = None) -> None:
        """Schedule snapshot analytics."""
        await self.send_snapshot()

        # Send snapshot analytics every day
        self._snapshot_scheduled = async_track_time_interval(
            self._hass,
            self.send_snapshot,
            INTERVAL,
            name="snapshot analytics daily",
            cancel_on_shutdown=True,
        )

def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
    """Extract domains from the YAML configuration."""
@@ -721,8 +505,8 @@ DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()


async def _async_snapshot_payload(hass: HomeAssistant) -> dict:  # noqa: C901
    """Return detailed information about entities and devices for a snapshot."""
async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901
    """Return detailed information about entities and devices."""
    dev_reg = dr.async_get(hass)
    ent_reg = er.async_get(hass)

@@ -927,13 +711,8 @@ async def _async_snapshot_payload(hass: HomeAssistant) -> dict:  # noqa: C901

        entities_info.append(entity_info)

    return integrations_info


async def async_devices_payload(hass: HomeAssistant) -> dict:
    """Return detailed information about entities and devices for a direct download."""
    return {
        "version": f"home-assistant:{SNAPSHOT_VERSION}",
        "version": "home-assistant:1",
        "home_assistant": HA_VERSION,
        "integrations": await _async_snapshot_payload(hass),
        "integrations": integrations_info,
    }

@@ -5,17 +5,13 @@ import logging

import voluptuous as vol

ANALYTICS_ENDPOINT_URL = "https://analytics-api.home-assistant.io/v1"
ANALYTICS_ENDPOINT_URL_DEV = "https://analytics-api-dev.home-assistant.io/v1"
DOMAIN = "analytics"
INTERVAL = timedelta(days=1)
STORAGE_KEY = "core.analytics"
STORAGE_VERSION = 1

BASIC_ENDPOINT_URL = "https://analytics-api.home-assistant.io/v1"
BASIC_ENDPOINT_URL_DEV = "https://analytics-api-dev.home-assistant.io/v1"

SNAPSHOT_VERSION = 1
SNAPSHOT_DEFAULT_URL = "https://device-database.eco-dev-aws.openhomefoundation.com"
SNAPSHOT_URL_PATH = f"/api/v1/snapshot/{SNAPSHOT_VERSION}"

LOGGER: logging.Logger = logging.getLogger(__package__)

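For reference, the snapshot URL used by `send_snapshot` is simply the default base joined with the versioned path; a quick check of the composition using the constants defined above:

```python
SNAPSHOT_VERSION = 1
SNAPSHOT_DEFAULT_URL = "https://device-database.eco-dev-aws.openhomefoundation.com"
SNAPSHOT_URL_PATH = f"/api/v1/snapshot/{SNAPSHOT_VERSION}"

url = SNAPSHOT_DEFAULT_URL + SNAPSHOT_URL_PATH
assert url == (
    "https://device-database.eco-dev-aws.openhomefoundation.com/api/v1/snapshot/1"
)
```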
@@ -42,7 +38,6 @@ ATTR_PREFERENCES = "preferences"
ATTR_PROTECTED = "protected"
ATTR_RECORDER = "recorder"
ATTR_SLUG = "slug"
ATTR_SNAPSHOTS = "snapshots"
ATTR_STATE_COUNT = "state_count"
ATTR_STATISTICS = "statistics"
ATTR_SUPERVISOR = "supervisor"
@@ -56,7 +51,6 @@ ATTR_VERSION = "version"
PREFERENCE_SCHEMA = vol.Schema(
    {
        vol.Optional(ATTR_BASE): bool,
        vol.Optional(ATTR_SNAPSHOTS): bool,
        vol.Optional(ATTR_DIAGNOSTICS): bool,
        vol.Optional(ATTR_STATISTICS): bool,
        vol.Optional(ATTR_USAGE): bool,

@@ -392,7 +392,7 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                type="tool_use",
                id=response.content_block.id,
                name=response.content_block.name,
                input={},
                input="",
            )
            current_tool_args = ""
            if response.content_block.name == output_tool:
@@ -459,7 +459,7 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                type="server_tool_use",
                id=response.content_block.id,
                name=response.content_block.name,
                input={},
                input="",
            )
            current_tool_args = ""
        elif isinstance(response.content_block, WebSearchToolResultBlock):

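This hunk switches the initial `input` of a streamed tool-use block between an empty dict and an empty string; with the Anthropic streaming API the tool arguments arrive as partial JSON text that is accumulated (here in `current_tool_args`) and only parsed once the block is complete. A rough sketch of that accumulation pattern, with hypothetical, simplified event objects:

```python
import json

# Hypothetical stream events: a content_block_start for a tool call,
# followed by input_json_delta chunks carrying partial JSON text.
events = [
    {"type": "content_block_start", "name": "get_weather"},
    {"type": "input_json_delta", "partial_json": '{"city": "Osl'},
    {"type": "input_json_delta", "partial_json": 'o"}'},
    {"type": "content_block_stop"},
]

current_tool_args = ""
for event in events:
    if event["type"] == "content_block_start":
        current_tool_args = ""            # reset per tool-use block
    elif event["type"] == "input_json_delta":
        current_tool_args += event["partial_json"]
    elif event["type"] == "content_block_stop":
        tool_input = json.loads(current_tool_args)  # -> {"city": "Oslo"}
```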
@@ -8,5 +8,5 @@
  "documentation": "https://www.home-assistant.io/integrations/anthropic",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "requirements": ["anthropic==0.73.0"]
  "requirements": ["anthropic==0.69.0"]
}

@@ -7,26 +7,3 @@ CONNECTION_TIMEOUT: int = 10

# Field name of last self test retrieved from apcupsd.
LAST_S_TEST: Final = "laststest"

# Mapping of deprecated sensor keys (as reported by apcupsd, lower-cased) to their deprecation
# repair issue translation keys.
DEPRECATED_SENSORS: Final = {
    "apc": "apc_deprecated",
    "end apc": "date_deprecated",
    "date": "date_deprecated",
    "apcmodel": "available_via_device_info",
    "model": "available_via_device_info",
    "firmware": "available_via_device_info",
    "version": "available_via_device_info",
    "upsname": "available_via_device_info",
    "serialno": "available_via_device_info",
}

AVAILABLE_VIA_DEVICE_ATTR: Final = {
    "apcmodel": "model",
    "model": "model",
    "firmware": "hw_version",
    "version": "sw_version",
    "upsname": "name",
    "serialno": "serial_number",
}

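Later in this diff, `sensor.py` uses these two mappings to decide whether a deprecated apcupsd key should raise a repair issue and which replacement to point the user at. A minimal sketch of that lookup, with a hypothetical sensor key and a `print` standing in for `ir.async_create_issue`:

```python
DEPRECATED_SENSORS = {"apcmodel": "available_via_device_info"}
AVAILABLE_VIA_DEVICE_ATTR = {"apcmodel": "model"}

sensor_key = "apcmodel"  # hypothetical entity description key

if (reason := DEPRECATED_SENSORS.get(sensor_key)) is not None:
    placeholders = {"entity_id": "sensor.myups_model"}  # illustrative entity id
    # Point the user at the device attribute that replaces the sensor, if any.
    if via_attr := AVAILABLE_VIA_DEVICE_ATTR.get(sensor_key):
        placeholders["available_via_device_attr"] = via_attr
    print(f"would create repair issue '{reason}' with {placeholders}")
```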
@@ -4,8 +4,6 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from homeassistant.components.automation import automations_with_entity
|
||||
from homeassistant.components.script import scripts_with_entity
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
@@ -24,11 +22,9 @@ from homeassistant.const import (
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
import homeassistant.helpers.issue_registry as ir
|
||||
|
||||
from .const import AVAILABLE_VIA_DEVICE_ATTR, DEPRECATED_SENSORS, DOMAIN, LAST_S_TEST
|
||||
from .const import LAST_S_TEST
|
||||
from .coordinator import APCUPSdConfigEntry, APCUPSdCoordinator
|
||||
from .entity import APCUPSdEntity
|
||||
|
||||
@@ -532,62 +528,3 @@ class APCUPSdSensor(APCUPSdEntity, SensorEntity):
|
||||
self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key])
|
||||
if not self.native_unit_of_measurement:
|
||||
self._attr_native_unit_of_measurement = inferred_unit
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle when entity is added to Home Assistant.
|
||||
|
||||
If this is a deprecated sensor entity, create a repair issue to guide
|
||||
the user to disable it.
|
||||
"""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
if not self.enabled:
|
||||
return
|
||||
|
||||
reason = DEPRECATED_SENSORS.get(self.entity_description.key)
|
||||
if not reason:
|
||||
return
|
||||
|
||||
automations = automations_with_entity(self.hass, self.entity_id)
|
||||
scripts = scripts_with_entity(self.hass, self.entity_id)
|
||||
if not automations and not scripts:
|
||||
return
|
||||
|
||||
entity_registry = er.async_get(self.hass)
|
||||
items = [
|
||||
f"- [{entry.name or entry.original_name or entity_id}]"
|
||||
f"(/config/{integration}/edit/{entry.unique_id or entity_id.split('.', 1)[-1]})"
|
||||
for integration, entities in (
|
||||
("automation", automations),
|
||||
("script", scripts),
|
||||
)
|
||||
for entity_id in entities
|
||||
if (entry := entity_registry.async_get(entity_id))
|
||||
]
|
||||
placeholders = {
|
||||
"entity_name": str(self.name or self.entity_id),
|
||||
"entity_id": self.entity_id,
|
||||
"items": "\n".join(items),
|
||||
}
|
||||
if via_attr := AVAILABLE_VIA_DEVICE_ATTR.get(self.entity_description.key):
|
||||
placeholders["available_via_device_attr"] = via_attr
|
||||
if device_entry := self.device_entry:
|
||||
placeholders["device_id"] = device_entry.id
|
||||
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
f"{reason}_{self.entity_id}",
|
||||
breaks_in_ha_version="2026.6.0",
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key=reason,
|
||||
translation_placeholders=placeholders,
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Handle when entity will be removed from Home Assistant."""
|
||||
await super().async_will_remove_from_hass()
|
||||
|
||||
if issue_key := DEPRECATED_SENSORS.get(self.entity_description.key):
|
||||
ir.async_delete_issue(self.hass, DOMAIN, f"{issue_key}_{self.entity_id}")
|
||||
|
||||
@@ -241,19 +241,5 @@
|
||||
"cannot_connect": {
|
||||
"message": "Cannot connect to APC UPS Daemon."
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"apc_deprecated": {
|
||||
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because it exposes internal details of the APC UPS Daemon response.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to use supported APC UPS entities instead. Reload the APC UPS Daemon integration afterwards to resolve this issue.",
|
||||
"title": "{entity_name} sensor is deprecated"
|
||||
},
|
||||
"available_via_device_info": {
|
||||
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because the same value is available from the device registry via `device_attr(\"{device_id}\", \"{available_via_device_attr}\")`.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to use the `device_attr` helper instead of this sensor. Reload the APC UPS Daemon integration afterwards to resolve this issue.",
|
||||
"title": "{entity_name} sensor is deprecated"
|
||||
},
|
||||
"date_deprecated": {
|
||||
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because the timestamp is already available from other APC UPS sensors via their last updated time.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to reference any entity's `last_updated` attribute instead (for example, `states.binary_sensor.apcups_online_status.last_updated`). Reload the APC UPS Daemon integration afterwards to resolve this issue.",
|
||||
"title": "{entity_name} sensor is deprecated"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/awair",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["python_awair"],
|
||||
"requirements": ["python-awair==0.2.5"],
|
||||
"requirements": ["python-awair==0.2.4"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"name": "awair*",
|
||||
|
||||
@@ -21,10 +21,10 @@ from .const import (
|
||||
ATTR_ITEM_NUMBER,
|
||||
ATTR_SERIAL_NUMBER,
|
||||
ATTR_TYPE_NUMBER,
|
||||
COMPATIBLE_MODELS,
|
||||
CONF_SERIAL_NUMBER,
|
||||
DEFAULT_MODEL,
|
||||
DOMAIN,
|
||||
SELECTABLE_MODELS,
|
||||
)
|
||||
from .util import get_serial_number_from_jid
|
||||
|
||||
@@ -70,7 +70,7 @@ class BangOlufsenConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_MODEL, default=DEFAULT_MODEL): SelectSelector(
|
||||
SelectSelectorConfig(options=SELECTABLE_MODELS)
|
||||
SelectSelectorConfig(options=COMPATIBLE_MODELS)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -62,7 +62,6 @@ class BangOlufsenMediaType(StrEnum):
|
||||
class BangOlufsenModel(StrEnum):
|
||||
"""Enum for compatible model names."""
|
||||
|
||||
# Mozart devices
|
||||
BEOCONNECT_CORE = "Beoconnect Core"
|
||||
BEOLAB_8 = "BeoLab 8"
|
||||
BEOLAB_28 = "BeoLab 28"
|
||||
@@ -72,26 +71,7 @@ class BangOlufsenModel(StrEnum):
|
||||
BEOSOUND_BALANCE = "Beosound Balance"
|
||||
BEOSOUND_EMERGE = "Beosound Emerge"
|
||||
BEOSOUND_LEVEL = "Beosound Level"
|
||||
BEOSOUND_PREMIERE = "Beosound Premiere"
|
||||
BEOSOUND_THEATRE = "Beosound Theatre"
|
||||
# Remote devices
|
||||
BEOREMOTE_ONE = "Beoremote One"
|
||||
|
||||
|
||||
# Physical "buttons" on devices
|
||||
class BangOlufsenButtons(StrEnum):
|
||||
"""Enum for device buttons."""
|
||||
|
||||
BLUETOOTH = "Bluetooth"
|
||||
MICROPHONE = "Microphone"
|
||||
NEXT = "Next"
|
||||
PLAY_PAUSE = "PlayPause"
|
||||
PRESET_1 = "Preset1"
|
||||
PRESET_2 = "Preset2"
|
||||
PRESET_3 = "Preset3"
|
||||
PRESET_4 = "Preset4"
|
||||
PREVIOUS = "Previous"
|
||||
VOLUME = "Volume"
|
||||
|
||||
|
||||
# Dispatcher events
|
||||
@@ -99,7 +79,6 @@ class WebsocketNotification(StrEnum):
|
||||
"""Enum for WebSocket notification types."""
|
||||
|
||||
ACTIVE_LISTENING_MODE = "active_listening_mode"
|
||||
BEO_REMOTE_BUTTON = "beo_remote_button"
|
||||
BUTTON = "button"
|
||||
PLAYBACK_ERROR = "playback_error"
|
||||
PLAYBACK_METADATA = "playback_metadata"
|
||||
@@ -117,7 +96,6 @@ class WebsocketNotification(StrEnum):
|
||||
BEOLINK_AVAILABLE_LISTENERS = "beolinkAvailableListeners"
|
||||
CONFIGURATION = "configuration"
|
||||
NOTIFICATION = "notification"
|
||||
REMOTE_CONTROL_DEVICES = "remoteControlDevices"
|
||||
REMOTE_MENU_CHANGED = "remoteMenuChanged"
|
||||
|
||||
ALL = "all"
|
||||
@@ -133,11 +111,7 @@ CONF_SERIAL_NUMBER: Final = "serial_number"
|
||||
CONF_BEOLINK_JID: Final = "jid"
|
||||
|
||||
# Models to choose from in manual configuration.
|
||||
SELECTABLE_MODELS: list[str] = [
|
||||
model.value for model in BangOlufsenModel if model != BangOlufsenModel.BEOREMOTE_ONE
|
||||
]
|
||||
|
||||
MANUFACTURER: Final[str] = "Bang & Olufsen"
|
||||
COMPATIBLE_MODELS: list[str] = [x.value for x in BangOlufsenModel]
|
||||
|
||||
# Attribute names for zeroconf discovery.
|
||||
ATTR_TYPE_NUMBER: Final[str] = "tn"
|
||||
@@ -230,16 +204,29 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
),
|
||||
]
|
||||
)
|
||||
# Map for storing compatibility of devices.
|
||||
|
||||
MODEL_SUPPORT_DEVICE_BUTTONS: Final[str] = "device_buttons"
|
||||
|
||||
MODEL_SUPPORT_MAP = {
|
||||
MODEL_SUPPORT_DEVICE_BUTTONS: (
|
||||
BangOlufsenModel.BEOLAB_8,
|
||||
BangOlufsenModel.BEOLAB_28,
|
||||
BangOlufsenModel.BEOSOUND_2,
|
||||
BangOlufsenModel.BEOSOUND_A5,
|
||||
BangOlufsenModel.BEOSOUND_A9,
|
||||
BangOlufsenModel.BEOSOUND_BALANCE,
|
||||
BangOlufsenModel.BEOSOUND_EMERGE,
|
||||
BangOlufsenModel.BEOSOUND_LEVEL,
|
||||
BangOlufsenModel.BEOSOUND_THEATRE,
|
||||
)
|
||||
}
|
||||
|
||||
# Device events
|
||||
BANG_OLUFSEN_WEBSOCKET_EVENT: Final[str] = f"{DOMAIN}_websocket_event"
|
||||
|
||||
# Dict used to translate native Bang & Olufsen event names to string.json compatible ones
|
||||
EVENT_TRANSLATION_MAP: dict[str, str] = {
|
||||
# Beoremote One
|
||||
"KeyPress": "key_press",
|
||||
"KeyRelease": "key_release",
|
||||
# Physical "buttons"
|
||||
"shortPress (Release)": "short_press_release",
|
||||
"longPress (Timeout)": "long_press_timeout",
|
||||
"longPress (Release)": "long_press_release",
|
||||
@@ -249,7 +236,18 @@ EVENT_TRANSLATION_MAP: dict[str, str] = {
|
||||
|
||||
CONNECTION_STATUS: Final[str] = "CONNECTION_STATUS"
|
||||
|
||||
DEVICE_BUTTONS: Final[list[str]] = [x.value for x in BangOlufsenButtons]
|
||||
DEVICE_BUTTONS: Final[list[str]] = [
|
||||
"Bluetooth",
|
||||
"Microphone",
|
||||
"Next",
|
||||
"PlayPause",
|
||||
"Preset1",
|
||||
"Preset2",
|
||||
"Preset3",
|
||||
"Preset4",
|
||||
"Previous",
|
||||
"Volume",
|
||||
]
|
||||
|
||||
|
||||
DEVICE_BUTTON_EVENTS: Final[list[str]] = [
|
||||
@@ -260,70 +258,6 @@ DEVICE_BUTTON_EVENTS: Final[list[str]] = [
|
||||
"very_long_press_release",
|
||||
]
|
||||
|
||||
BEO_REMOTE_SUBMENU_CONTROL: Final[str] = "Control"
|
||||
BEO_REMOTE_SUBMENU_LIGHT: Final[str] = "Light"
|
||||
|
||||
# Common for both submenus
|
||||
BEO_REMOTE_KEYS: Final[tuple[str, ...]] = (
|
||||
"Blue",
|
||||
"Digit0",
|
||||
"Digit1",
|
||||
"Digit2",
|
||||
"Digit3",
|
||||
"Digit4",
|
||||
"Digit5",
|
||||
"Digit6",
|
||||
"Digit7",
|
||||
"Digit8",
|
||||
"Digit9",
|
||||
"Down",
|
||||
"Green",
|
||||
"Left",
|
||||
"Play",
|
||||
"Red",
|
||||
"Rewind",
|
||||
"Right",
|
||||
"Select",
|
||||
"Stop",
|
||||
"Up",
|
||||
"Wind",
|
||||
"Yellow",
|
||||
"Func1",
|
||||
"Func2",
|
||||
"Func3",
|
||||
"Func4",
|
||||
"Func5",
|
||||
"Func6",
|
||||
"Func7",
|
||||
"Func8",
|
||||
"Func9",
|
||||
"Func10",
|
||||
"Func11",
|
||||
"Func12",
|
||||
"Func13",
|
||||
"Func14",
|
||||
"Func15",
|
||||
"Func16",
|
||||
"Func17",
|
||||
)
|
||||
|
||||
# "keys" that are unique to the Control submenu
|
||||
BEO_REMOTE_CONTROL_KEYS: Final[tuple[str, ...]] = (
|
||||
"Func18",
|
||||
"Func19",
|
||||
"Func20",
|
||||
"Func21",
|
||||
"Func22",
|
||||
"Func23",
|
||||
"Func24",
|
||||
"Func25",
|
||||
"Func26",
|
||||
"Func27",
|
||||
)
|
||||
|
||||
BEO_REMOTE_KEY_EVENTS: Final[list[str]] = ["key_press", "key_release"]
|
||||
|
||||
|
||||
# Beolink Converter NL/ML sources need to be transformed to upper case
|
||||
BEOLINK_JOIN_SOURCES_TO_UPPER = (
|
||||
"aux_a",
|
||||
|
||||
@@ -6,13 +6,11 @@ from typing import TYPE_CHECKING, Any
|
||||
|
||||
from homeassistant.components.event import DOMAIN as EVENT_DOMAIN
|
||||
from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
|
||||
from homeassistant.const import CONF_MODEL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from . import BangOlufsenConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .util import get_device_buttons
|
||||
from .const import DEVICE_BUTTONS, DOMAIN
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
@@ -42,7 +40,7 @@ async def async_get_config_entry_diagnostics(
|
||||
data["media_player"] = state_dict
|
||||
|
||||
# Add button Event entity states (if enabled)
|
||||
for device_button in get_device_buttons(config_entry.data[CONF_MODEL]):
|
||||
for device_button in DEVICE_BUTTONS:
|
||||
if entity_id := entity_registry.async_get_entity_id(
|
||||
EVENT_DOMAIN, DOMAIN, f"{config_entry.unique_id}_{device_button}"
|
||||
):
|
||||
|
||||
@@ -2,34 +2,22 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from mozart_api.models import PairedRemote
|
||||
|
||||
from homeassistant.components.event import EventDeviceClass, EventEntity
|
||||
from homeassistant.const import CONF_MODEL
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import BangOlufsenConfigEntry
|
||||
from .const import (
|
||||
BEO_REMOTE_CONTROL_KEYS,
|
||||
BEO_REMOTE_KEY_EVENTS,
|
||||
BEO_REMOTE_KEYS,
|
||||
BEO_REMOTE_SUBMENU_CONTROL,
|
||||
BEO_REMOTE_SUBMENU_LIGHT,
|
||||
CONNECTION_STATUS,
|
||||
DEVICE_BUTTON_EVENTS,
|
||||
DOMAIN,
|
||||
MANUFACTURER,
|
||||
BangOlufsenModel,
|
||||
DEVICE_BUTTONS,
|
||||
MODEL_SUPPORT_DEVICE_BUTTONS,
|
||||
MODEL_SUPPORT_MAP,
|
||||
WebsocketNotification,
|
||||
)
|
||||
from .entity import BangOlufsenEntity
|
||||
from .util import get_device_buttons, get_remotes
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
@@ -39,87 +27,25 @@ async def async_setup_entry(
|
||||
config_entry: BangOlufsenConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Event entities from config entry."""
|
||||
entities: list[BangOlufsenEvent] = []
|
||||
"""Set up Sensor entities from config entry."""
|
||||
|
||||
async_add_entities(
|
||||
BangOlufsenButtonEvent(config_entry, button_type)
|
||||
for button_type in get_device_buttons(config_entry.data[CONF_MODEL])
|
||||
)
|
||||
|
||||
# Check for connected Beoremote One
|
||||
remotes = await get_remotes(config_entry.runtime_data.client)
|
||||
|
||||
for remote in remotes:
|
||||
# Add Light keys
|
||||
entities.extend(
|
||||
[
|
||||
BangOlufsenRemoteKeyEvent(
|
||||
config_entry,
|
||||
remote,
|
||||
f"{BEO_REMOTE_SUBMENU_LIGHT}/{key_type}",
|
||||
)
|
||||
for key_type in BEO_REMOTE_KEYS
|
||||
]
|
||||
if config_entry.data[CONF_MODEL] in MODEL_SUPPORT_MAP[MODEL_SUPPORT_DEVICE_BUTTONS]:
|
||||
async_add_entities(
|
||||
BangOlufsenButtonEvent(config_entry, button_type)
|
||||
for button_type in DEVICE_BUTTONS
|
||||
)
|
||||
|
||||
# Add Control keys
|
||||
entities.extend(
|
||||
[
|
||||
BangOlufsenRemoteKeyEvent(
|
||||
config_entry,
|
||||
remote,
|
||||
f"{BEO_REMOTE_SUBMENU_CONTROL}/{key_type}",
|
||||
)
|
||||
for key_type in (*BEO_REMOTE_KEYS, *BEO_REMOTE_CONTROL_KEYS)
|
||||
]
|
||||
)
|
||||
|
||||
# If the remote is no longer available, then delete the device.
|
||||
# The remote may appear as being available to the device after it has been unpaired on the remote
|
||||
# As it has to be removed from the device on the app.
|
||||
|
||||
device_registry = dr.async_get(hass)
|
||||
devices = device_registry.devices.get_devices_for_config_entry_id(
|
||||
config_entry.entry_id
|
||||
)
|
||||
for device in devices:
|
||||
if (
|
||||
device.model == BangOlufsenModel.BEOREMOTE_ONE
|
||||
and device.serial_number not in {remote.serial_number for remote in remotes}
|
||||
):
|
||||
device_registry.async_update_device(
|
||||
device.id, remove_config_entry_id=config_entry.entry_id
|
||||
)
|
||||
|
||||
async_add_entities(new_entities=entities)
|
||||
|
||||
|
||||
class BangOlufsenEvent(BangOlufsenEntity, EventEntity):
|
||||
"""Base Event class."""
|
||||
class BangOlufsenButtonEvent(BangOlufsenEntity, EventEntity):
|
||||
"""Event class for Button events."""
|
||||
|
||||
_attr_device_class = EventDeviceClass.BUTTON
|
||||
_attr_entity_registry_enabled_default = False
|
||||
|
||||
def __init__(self, config_entry: BangOlufsenConfigEntry) -> None:
|
||||
"""Initialize Event."""
|
||||
super().__init__(config_entry, config_entry.runtime_data.client)
|
||||
|
||||
@callback
|
||||
def _async_handle_event(self, event: str) -> None:
|
||||
"""Handle event."""
|
||||
self._trigger_event(event)
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class BangOlufsenButtonEvent(BangOlufsenEvent):
|
||||
"""Event class for Button events."""
|
||||
|
||||
_attr_event_types = DEVICE_BUTTON_EVENTS
|
||||
|
||||
def __init__(self, config_entry: BangOlufsenConfigEntry, button_type: str) -> None:
|
||||
"""Initialize Button."""
|
||||
super().__init__(config_entry)
|
||||
super().__init__(config_entry, config_entry.runtime_data.client)
|
||||
|
||||
self._attr_unique_id = f"{self._unique_id}_{button_type}"
|
||||
|
||||
@@ -133,65 +59,20 @@ class BangOlufsenButtonEvent(BangOlufsenEvent):
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
|
||||
f"{self._unique_id}_{CONNECTION_STATUS}",
|
||||
self._async_update_connection_state,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BUTTON}_{self._button_type}",
|
||||
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{self._button_type}",
|
||||
self._async_handle_event,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class BangOlufsenRemoteKeyEvent(BangOlufsenEvent):
|
||||
"""Event class for Beoremote One key events."""
|
||||
|
||||
_attr_event_types = BEO_REMOTE_KEY_EVENTS
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
config_entry: BangOlufsenConfigEntry,
|
||||
remote: PairedRemote,
|
||||
key_type: str,
|
||||
) -> None:
|
||||
"""Initialize Beoremote One key."""
|
||||
super().__init__(config_entry)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert remote.serial_number
|
||||
|
||||
self._attr_unique_id = f"{remote.serial_number}_{self._unique_id}_{key_type}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"{remote.serial_number}_{self._unique_id}")},
|
||||
name=f"{BangOlufsenModel.BEOREMOTE_ONE}-{remote.serial_number}-{self._unique_id}",
|
||||
model=BangOlufsenModel.BEOREMOTE_ONE,
|
||||
serial_number=remote.serial_number,
|
||||
sw_version=remote.app_version,
|
||||
manufacturer=MANUFACTURER,
|
||||
via_device=(DOMAIN, self._unique_id),
|
||||
)
|
||||
|
||||
# Make the native key name Home Assistant compatible
|
||||
self._attr_translation_key = key_type.lower().replace("/", "_")
|
||||
|
||||
self._key_type = key_type
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Listen to WebSocket Beoremote One key events."""
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
|
||||
self._async_update_connection_state,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BEO_REMOTE_BUTTON}_{self._key_type}",
|
||||
self._async_handle_event,
|
||||
)
|
||||
)
|
||||
@callback
|
||||
def _async_handle_event(self, event: str) -> None:
|
||||
"""Handle event."""
|
||||
self._trigger_event(event)
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -1,278 +1,4 @@
|
||||
{
|
||||
"entity": {
|
||||
"event": {
|
||||
"control_blue": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit0": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit1": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit2": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit3": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit4": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit5": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit6": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit7": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit8": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit9": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_down": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func1": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func10": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func11": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func12": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func13": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func14": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func15": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func16": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func17": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func18": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func19": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func2": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func20": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func21": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func22": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func23": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func24": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func25": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func26": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func27": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func3": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func4": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func5": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func6": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func7": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func8": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func9": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_green": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_left": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_play": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_red": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_rewind": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_right": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_select": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_stop": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_up": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_wind": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_yellow": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_blue": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit0": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit1": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit2": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit3": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit4": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit5": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit6": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit7": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit8": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit9": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_down": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func1": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func10": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func11": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func12": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func13": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func14": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func15": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func16": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func17": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func2": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func3": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func4": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func5": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func6": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func7": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func8": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func9": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_green": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_left": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_play": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_red": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_rewind": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_right": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_select": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_stop": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_up": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_wind": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_yellow": {
|
||||
"default": "mdi:remote"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"beolink_allstandby": { "service": "mdi:close-circle-multiple-outline" },
|
||||
"beolink_expand": { "service": "mdi:location-enter" },
|
||||
|
||||
@@ -80,7 +80,6 @@ from .const import (
|
||||
CONNECTION_STATUS,
|
||||
DOMAIN,
|
||||
FALLBACK_SOURCES,
|
||||
MANUFACTURER,
|
||||
VALID_MEDIA_TYPES,
|
||||
BangOlufsenMediaType,
|
||||
BangOlufsenSource,
|
||||
@@ -202,7 +201,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
self._attr_device_info = DeviceInfo(
|
||||
configuration_url=f"http://{self._host}/#/",
|
||||
identifiers={(DOMAIN, self._unique_id)},
|
||||
manufacturer=MANUFACTURER,
|
||||
manufacturer="Bang & Olufsen",
|
||||
model=self._model,
|
||||
serial_number=self._unique_id,
|
||||
)
|
||||
@@ -250,7 +249,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{signal}",
|
||||
f"{self._unique_id}_{signal}",
|
||||
signal_handler,
|
||||
)
|
||||
)
|
||||
|
||||
File diff suppressed because it is too large
@@ -2,16 +2,11 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import cast
|
||||
|
||||
from mozart_api.models import PairedRemote
|
||||
from mozart_api.mozart_client import MozartClient
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import DeviceEntry
|
||||
|
||||
from .const import DEVICE_BUTTONS, DOMAIN, BangOlufsenButtons, BangOlufsenModel
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
def get_device(hass: HomeAssistant, unique_id: str) -> DeviceEntry:
|
||||
@@ -26,30 +21,3 @@ def get_device(hass: HomeAssistant, unique_id: str) -> DeviceEntry:
|
||||
def get_serial_number_from_jid(jid: str) -> str:
|
||||
"""Get serial number from Beolink JID."""
|
||||
return jid.split(".")[2].split("@")[0]
|
||||
|
||||
|
||||
async def get_remotes(client: MozartClient) -> list[PairedRemote]:
|
||||
"""Get paired remotes."""
|
||||
|
||||
bluetooth_remote_list = await client.get_bluetooth_remotes()
|
||||
|
||||
return [
|
||||
remote
|
||||
for remote in cast(list[PairedRemote], bluetooth_remote_list.items)
|
||||
if remote.serial_number is not None
|
||||
]
|
||||
|
||||
|
||||
def get_device_buttons(model: BangOlufsenModel) -> list[str]:
|
||||
"""Get supported buttons for a given model."""
|
||||
buttons = DEVICE_BUTTONS.copy()
|
||||
|
||||
# Beosound Premiere does not have a bluetooth button
|
||||
if model == BangOlufsenModel.BEOSOUND_PREMIERE:
|
||||
buttons.remove(BangOlufsenButtons.BLUETOOTH)
|
||||
|
||||
# Beoconnect Core does not have any buttons
|
||||
elif model == BangOlufsenModel.BEOCONNECT_CORE:
|
||||
buttons = []
|
||||
|
||||
return buttons
|
||||
|
||||
@@ -6,7 +6,6 @@ import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from mozart_api.models import (
|
||||
BeoRemoteButton,
|
||||
ButtonEvent,
|
||||
ListeningModeProps,
|
||||
PlaybackContentMetadata,
|
||||
@@ -29,13 +28,11 @@ from homeassistant.util.enum import try_parse_enum
|
||||
from .const import (
|
||||
BANG_OLUFSEN_WEBSOCKET_EVENT,
|
||||
CONNECTION_STATUS,
|
||||
DOMAIN,
|
||||
EVENT_TRANSLATION_MAP,
|
||||
BangOlufsenModel,
|
||||
WebsocketNotification,
|
||||
)
|
||||
from .entity import BangOlufsenBase
|
||||
from .util import get_device, get_remotes
|
||||
from .util import get_device
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -60,9 +57,6 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
self._client.get_active_listening_mode_notifications(
|
||||
self.on_active_listening_mode
|
||||
)
|
||||
self._client.get_beo_remote_button_notifications(
|
||||
self.on_beo_remote_button_notification
|
||||
)
|
||||
self._client.get_button_notifications(self.on_button_notification)
|
||||
|
||||
self._client.get_playback_error_notifications(
|
||||
@@ -93,7 +87,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Update all entities of the connection status."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
|
||||
f"{self._unique_id}_{CONNECTION_STATUS}",
|
||||
self._client.websocket_connected,
|
||||
)
|
||||
|
||||
@@ -111,22 +105,10 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send active_listening_mode dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.ACTIVE_LISTENING_MODE}",
|
||||
f"{self._unique_id}_{WebsocketNotification.ACTIVE_LISTENING_MODE}",
|
||||
notification,
|
||||
)
|
||||
|
||||
def on_beo_remote_button_notification(self, notification: BeoRemoteButton) -> None:
|
||||
"""Send beo_remote_button dispatch."""
|
||||
if TYPE_CHECKING:
|
||||
assert notification.type
|
||||
|
||||
# Send to event entity
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BEO_REMOTE_BUTTON}_{notification.key}",
|
||||
EVENT_TRANSLATION_MAP[notification.type],
|
||||
)
|
||||
|
||||
def on_button_notification(self, notification: ButtonEvent) -> None:
|
||||
"""Send button dispatch."""
|
||||
# State is expected to always be available.
|
||||
@@ -136,11 +118,11 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
# Send to event entity
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BUTTON}_{notification.button}",
|
||||
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{notification.button}",
|
||||
EVENT_TRANSLATION_MAP[notification.state],
|
||||
)
|
||||
|
||||
async def on_notification_notification(
|
||||
def on_notification_notification(
|
||||
self, notification: WebsocketNotificationTag
|
||||
) -> None:
|
||||
"""Send notification dispatch."""
|
||||
@@ -154,51 +136,24 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
):
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BEOLINK}",
|
||||
f"{self._unique_id}_{WebsocketNotification.BEOLINK}",
|
||||
)
|
||||
elif notification_type is WebsocketNotification.CONFIGURATION:
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.CONFIGURATION}",
|
||||
f"{self._unique_id}_{WebsocketNotification.CONFIGURATION}",
|
||||
)
|
||||
elif notification_type is WebsocketNotification.REMOTE_MENU_CHANGED:
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}",
|
||||
f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}",
|
||||
)
|
||||
|
||||
# This notification is triggered by a remote pairing, unpairing and connecting to a device
|
||||
# So the current remote devices have to be compared to available remotes to determine action
|
||||
elif notification_type is WebsocketNotification.REMOTE_CONTROL_DEVICES:
|
||||
device_registry = dr.async_get(self.hass)
|
||||
# Get remote devices connected to the device from Home Assistant
|
||||
device_serial_numbers = [
|
||||
device.serial_number
|
||||
for device in device_registry.devices.get_devices_for_config_entry_id(
|
||||
self.entry.entry_id
|
||||
)
|
||||
if device.serial_number is not None
|
||||
and device.model == BangOlufsenModel.BEOREMOTE_ONE
|
||||
]
|
||||
# Get paired remotes from device
|
||||
remote_serial_numbers = [
|
||||
remote.serial_number
|
||||
for remote in await get_remotes(self._client)
|
||||
if remote.serial_number is not None
|
||||
]
|
||||
# Check if number of remote devices correspond to number of paired remotes
|
||||
if len(remote_serial_numbers) != len(device_serial_numbers):
|
||||
_LOGGER.info(
|
||||
"A Beoremote One has been paired or unpaired to %s. Reloading config entry to add device and entities",
|
||||
self.entry.title,
|
||||
)
|
||||
self.hass.config_entries.async_schedule_reload(self.entry.entry_id)
|
||||
|
||||
def on_playback_error_notification(self, notification: PlaybackError) -> None:
|
||||
"""Send playback_error dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_ERROR}",
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_ERROR}",
|
||||
notification,
|
||||
)
|
||||
|
||||
@@ -208,7 +163,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send playback_metadata dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_METADATA}",
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_METADATA}",
|
||||
notification,
|
||||
)
|
||||
|
||||
@@ -216,7 +171,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send playback_progress dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_PROGRESS}",
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_PROGRESS}",
|
||||
notification,
|
||||
)
|
||||
|
||||
@@ -224,7 +179,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send playback_state dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_STATE}",
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_STATE}",
|
||||
notification,
|
||||
)
|
||||
|
||||
@@ -232,7 +187,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send playback_source dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}",
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}",
|
||||
notification,
|
||||
)
|
||||
|
||||
@@ -240,7 +195,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send source_change dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.SOURCE_CHANGE}",
|
||||
f"{self._unique_id}_{WebsocketNotification.SOURCE_CHANGE}",
|
||||
notification,
|
||||
)
|
||||
|
||||
@@ -248,7 +203,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
"""Send volume dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.VOLUME}",
|
||||
f"{self._unique_id}_{WebsocketNotification.VOLUME}",
|
||||
notification,
|
||||
)
|
||||
|
||||
|
||||
@@ -1,7 +1,13 @@
"""The blueprint integration."""

from typing import Any

import voluptuous as vol

from homeassistant.const import CONF_NAME, CONF_SELECTOR
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.selector import selector as create_selector
from homeassistant.helpers.typing import ConfigType

from . import websocket_api
@@ -29,3 +35,61 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the blueprint integration."""
    websocket_api.async_setup(hass)
    return True


async def async_find_relevant_blueprints(
    hass: HomeAssistant, device_id: str
) -> dict[str, list[dict[str, Any]]]:
    """Find all blueprints relevant to a specific device."""
    results = {}
    entities = [
        entry
        for entry in er.async_entries_for_device(er.async_get(hass), device_id)
        if not entry.entity_category
    ]

    async def all_blueprints_generator(hass: HomeAssistant):
        """Yield all blueprints from all domains."""
        blueprint_domains: dict[str, DomainBlueprints] = hass.data[DOMAIN]
        for blueprint_domain in blueprint_domains.values():
            blueprints = await blueprint_domain.async_get_blueprints()
            for blueprint in blueprints.values():
                yield blueprint

    async for blueprint in all_blueprints_generator(hass):
        blueprint_input_matches: dict[str, list[str]] = {}

        for info in blueprint.inputs.values():
            if (
                not info
                or not (selector_conf := info.get(CONF_SELECTOR))
                or "entity" not in selector_conf
            ):
                continue

            selector = create_selector(selector_conf)

            matched = []

            for entity in entities:
                try:
                    entity.entity_id, selector(entity.entity_id)
                except vol.Invalid:
                    continue

                matched.append(entity.entity_id)

            if matched:
                blueprint_input_matches[info[CONF_NAME]] = matched

        if not blueprint_input_matches:
            continue

        results.setdefault(blueprint.domain, []).append(
            {
                "blueprint": blueprint,
                "matched_input": blueprint_input_matches,
            }
        )

    return results

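A rough idea of how a caller might consume the new helper; the device id is hypothetical and the result shape simply mirrors the dict built above (per-domain lists of a blueprint plus the input names that matched entities of the device):

```python
# Sketch only: would run inside Home Assistant, e.g. from a websocket command handler.
relevant = await async_find_relevant_blueprints(hass, device_id="abc123")

for domain, matches in relevant.items():          # e.g. "automation", "script"
    for match in matches:
        blueprint = match["blueprint"]            # Blueprint object
        matched_input = match["matched_input"]    # {input name: [entity_ids]}
        print(domain, matched_input)
```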
@@ -20,7 +20,7 @@
|
||||
"bluetooth-adapters==2.1.0",
|
||||
"bluetooth-auto-recovery==1.5.3",
|
||||
"bluetooth-data-tools==1.28.4",
|
||||
"dbus-fast==3.1.2",
|
||||
"dbus-fast==2.45.0",
|
||||
"habluetooth==5.7.0"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -24,7 +24,7 @@ class BrotherPrinterEntity(CoordinatorEntity[BrotherDataUpdateCoordinator]):
|
||||
connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
|
||||
serial_number=coordinator.brother.serial,
|
||||
manufacturer="Brother",
|
||||
model_id=coordinator.brother.model,
|
||||
model=coordinator.brother.model,
|
||||
name=coordinator.brother.model,
|
||||
sw_version=coordinator.brother.firmware,
|
||||
)
|
||||
|
||||
@@ -8,8 +8,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["brother==6.0.0"],
|
||||
"requirements": ["brother==5.1.1"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"name": "brother*",
|
||||
|
||||
@@ -1,78 +0,0 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: The integration does not register services.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: The integration does not register services.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: The integration does not register services.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: No options to configure.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: SNMP doesn't return error identifying an authentication problem, to change the SNMP community (simple password) the user should use reconfigure flow.
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: This integration has a fixed single device.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: This integration doesn't have any cases where raising an issue is needed.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: This integration has a fixed single device.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession:
|
||||
status: exempt
|
||||
comment: The integration does not connect via HTTP instead it uses a shared SNMP engine.
|
||||
strict-typing: done
|
||||
@@ -17,7 +17,7 @@ from homeassistant.components.sensor import (
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
@@ -345,10 +345,12 @@ class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
|
||||
"""Initialize."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
self._attr_native_value = description.value(coordinator.data)
|
||||
self._attr_unique_id = f"{coordinator.brother.serial.lower()}_{description.key}"
|
||||
self.entity_description = description
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType | datetime:
|
||||
"""Return the native value of the sensor."""
|
||||
return self.entity_description.value(self.coordinator.data)
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._attr_native_value = self.entity_description.value(self.coordinator.data)
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -7,7 +7,7 @@ from collections.abc import Awaitable, Callable
|
||||
from datetime import datetime, timedelta
|
||||
from enum import Enum
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
from typing import cast
|
||||
|
||||
from hass_nabucasa import Cloud
|
||||
import voluptuous as vol
|
||||
@@ -86,10 +86,6 @@ SIGNAL_CLOUD_CONNECTION_STATE: SignalType[CloudConnectionState] = SignalType(
|
||||
"CLOUD_CONNECTION_STATE"
|
||||
)
|
||||
|
||||
_SIGNAL_CLOUDHOOKS_UPDATED: SignalType[dict[str, Any]] = SignalType(
|
||||
"CLOUDHOOKS_UPDATED"
|
||||
)
|
||||
|
||||
STARTUP_REPAIR_DELAY = 1 # 1 hour
|
||||
|
||||
ALEXA_ENTITY_SCHEMA = vol.Schema(
|
||||
@@ -246,24 +242,6 @@ async def async_delete_cloudhook(hass: HomeAssistant, webhook_id: str) -> None:
|
||||
await hass.data[DATA_CLOUD].cloudhooks.async_delete(webhook_id)
|
||||
|
||||
|
||||
@callback
|
||||
def async_listen_cloudhook_change(
|
||||
hass: HomeAssistant,
|
||||
webhook_id: str,
|
||||
on_change: Callable[[dict[str, Any] | None], None],
|
||||
) -> Callable[[], None]:
|
||||
"""Listen for cloudhook changes for the given webhook and notify when modified or deleted."""
|
||||
|
||||
@callback
|
||||
def _handle_cloudhooks_updated(cloudhooks: dict[str, Any]) -> None:
|
||||
"""Handle cloudhooks updated signal."""
|
||||
on_change(cloudhooks.get(webhook_id))
|
||||
|
||||
return async_dispatcher_connect(
|
||||
hass, _SIGNAL_CLOUDHOOKS_UPDATED, _handle_cloudhooks_updated
|
||||
)
|
||||
|
||||
|
||||
@bind_hass
|
||||
@callback
|
||||
def async_remote_ui_url(hass: HomeAssistant) -> str:
|
||||
@@ -311,7 +289,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)
|
||||
|
||||
_handle_prefs_updated(hass, cloud)
|
||||
_remote_handle_prefs_updated(cloud)
|
||||
_setup_services(hass, prefs)
|
||||
|
||||
async def async_startup_repairs(_: datetime) -> None:
|
||||
@@ -395,32 +373,26 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
|
||||
@callback
|
||||
def _handle_prefs_updated(hass: HomeAssistant, cloud: Cloud[CloudClient]) -> None:
|
||||
"""Register handler for cloud preferences updates."""
|
||||
cur_remote_enabled = cloud.client.prefs.remote_enabled
|
||||
cur_cloudhooks = cloud.client.prefs.cloudhooks
|
||||
def _remote_handle_prefs_updated(cloud: Cloud[CloudClient]) -> None:
|
||||
"""Handle remote preferences updated."""
|
||||
cur_pref = cloud.client.prefs.remote_enabled
|
||||
lock = asyncio.Lock()
|
||||
|
||||
async def on_prefs_updated(prefs: CloudPreferences) -> None:
|
||||
"""Handle cloud preferences updates."""
|
||||
nonlocal cur_remote_enabled
|
||||
nonlocal cur_cloudhooks
|
||||
# Sync remote connection with prefs
|
||||
async def remote_prefs_updated(prefs: CloudPreferences) -> None:
|
||||
"""Update remote status."""
|
||||
nonlocal cur_pref
|
||||
|
||||
# Lock protects cur_ state variables from concurrent updates
|
||||
async with lock:
|
||||
if cur_cloudhooks != prefs.cloudhooks:
|
||||
cur_cloudhooks = prefs.cloudhooks
|
||||
async_dispatcher_send(hass, _SIGNAL_CLOUDHOOKS_UPDATED, cur_cloudhooks)
|
||||
|
||||
if prefs.remote_enabled == cur_remote_enabled:
|
||||
if prefs.remote_enabled == cur_pref:
|
||||
return
|
||||
|
||||
if cur_remote_enabled := prefs.remote_enabled:
|
||||
if cur_pref := prefs.remote_enabled:
|
||||
await cloud.remote.connect()
|
||||
else:
|
||||
await cloud.remote.disconnect()
|
||||
|
||||
cloud.client.prefs.async_listen_updates(on_prefs_updated)
|
||||
cloud.client.prefs.async_listen_updates(remote_prefs_updated)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
@@ -18,7 +18,6 @@ def async_setup(hass: HomeAssistant) -> bool:
|
||||
websocket_api.async_register_command(hass, websocket_create_area)
|
||||
websocket_api.async_register_command(hass, websocket_delete_area)
|
||||
websocket_api.async_register_command(hass, websocket_update_area)
|
||||
websocket_api.async_register_command(hass, websocket_reorder_areas)
|
||||
return True
|
||||
|
||||
|
||||
@@ -146,27 +145,3 @@ def websocket_update_area(
|
||||
connection.send_error(msg["id"], "invalid_info", str(err))
|
||||
else:
|
||||
connection.send_result(msg["id"], entry.json_fragment)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "config/area_registry/reorder",
|
||||
vol.Required("area_ids"): [str],
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
@callback
|
||||
def websocket_reorder_areas(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Handle reorder areas websocket command."""
|
||||
registry = ar.async_get(hass)
|
||||
|
||||
try:
|
||||
registry.async_reorder(msg["area_ids"])
|
||||
except ValueError as err:
|
||||
connection.send_error(msg["id"], websocket_api.ERR_INVALID_FORMAT, str(err))
|
||||
else:
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
@@ -18,7 +18,6 @@ def async_setup(hass: HomeAssistant) -> bool:
|
||||
websocket_api.async_register_command(hass, websocket_create_floor)
|
||||
websocket_api.async_register_command(hass, websocket_delete_floor)
|
||||
websocket_api.async_register_command(hass, websocket_update_floor)
|
||||
websocket_api.async_register_command(hass, websocket_reorder_floors)
|
||||
return True
|
||||
|
||||
|
||||
@@ -128,28 +127,6 @@ def websocket_update_floor(
|
||||
connection.send_result(msg["id"], _entry_dict(entry))
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "config/floor_registry/reorder",
|
||||
vol.Required("floor_ids"): [str],
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
@callback
|
||||
def websocket_reorder_floors(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle reorder floors websocket command."""
|
||||
registry = fr.async_get(hass)
|
||||
|
||||
try:
|
||||
registry.async_reorder(msg["floor_ids"])
|
||||
except ValueError as err:
|
||||
connection.send_error(msg["id"], websocket_api.ERR_INVALID_FORMAT, str(err))
|
||||
else:
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@callback
|
||||
def _entry_dict(entry: FloorEntry) -> dict[str, Any]:
|
||||
"""Convert entry to API format."""
|
||||
|
||||
@@ -1 +0,0 @@
"""Virtual integration: Cosori."""
@@ -1,6 +0,0 @@
{
  "domain": "cosori",
  "name": "Cosori",
  "integration_type": "virtual",
  "supported_by": "vesync"
}
@@ -9,7 +9,6 @@ from homeassistant.const import CONF_ACCESS_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.util.ssl import get_default_context
|
||||
|
||||
from .const import (
|
||||
CONF_AUTHORIZE_STRING,
|
||||
@@ -32,13 +31,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: CyncConfigEntry) -> bool
|
||||
expires_at=entry.data[CONF_EXPIRES_AT],
|
||||
)
|
||||
cync_auth = Auth(async_get_clientsession(hass), user=user_info)
|
||||
ssl_context = get_default_context()
|
||||
|
||||
try:
|
||||
cync = await Cync.create(
|
||||
auth=cync_auth,
|
||||
ssl_context=ssl_context,
|
||||
)
|
||||
cync = await Cync.create(cync_auth)
|
||||
except AuthFailedError as ex:
|
||||
raise ConfigEntryAuthFailed("User token invalid") from ex
|
||||
except CyncError as ex:
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -26,7 +25,6 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -169,7 +167,6 @@ class DecoraWifiLight(LightEntity):
|
||||
except ValueError:
|
||||
_LOGGER.error("Failed to turn off myLeviton switch")
|
||||
|
||||
@Throttle(timedelta(seconds=30))
|
||||
def update(self) -> None:
|
||||
"""Fetch new state data for this switch."""
|
||||
try:
|
||||
|
||||
@@ -5,10 +5,5 @@
"default": "mdi:chart-line"
}
}
},
"services": {
"reload": {
"service": "mdi:reload"
}
}
}

@@ -20,10 +20,8 @@ from homeassistant.const import (
|
||||
ATTR_UNIT_OF_MEASUREMENT,
|
||||
CONF_NAME,
|
||||
CONF_SOURCE,
|
||||
CONF_UNIQUE_ID,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
Platform,
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.core import (
|
||||
@@ -46,7 +44,6 @@ from homeassistant.helpers.event import (
|
||||
async_track_state_change_event,
|
||||
async_track_state_report_event,
|
||||
)
|
||||
from homeassistant.helpers.reload import async_setup_reload_service
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import (
|
||||
@@ -56,7 +53,6 @@ from .const import (
|
||||
CONF_UNIT,
|
||||
CONF_UNIT_PREFIX,
|
||||
CONF_UNIT_TIME,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -89,7 +85,6 @@ DEFAULT_TIME_WINDOW = 0
|
||||
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_UNIQUE_ID): cv.string,
|
||||
vol.Required(CONF_SOURCE): cv.entity_id,
|
||||
vol.Optional(CONF_ROUND_DIGITS, default=DEFAULT_ROUND): vol.Coerce(int),
|
||||
vol.Optional(CONF_UNIT_PREFIX, default=None): vol.In(UNIT_PREFIXES),
|
||||
@@ -150,8 +145,6 @@ async def async_setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the derivative sensor."""
|
||||
await async_setup_reload_service(hass, DOMAIN, [Platform.SENSOR])
|
||||
|
||||
derivative = DerivativeSensor(
|
||||
hass,
|
||||
name=config.get(CONF_NAME),
|
||||
@@ -161,7 +154,7 @@ async def async_setup_platform(
|
||||
unit_of_measurement=config.get(CONF_UNIT),
|
||||
unit_prefix=config[CONF_UNIT_PREFIX],
|
||||
unit_time=config[CONF_UNIT_TIME],
|
||||
unique_id=config.get(CONF_UNIQUE_ID),
|
||||
unique_id=None,
|
||||
max_sub_interval=config.get(CONF_MAX_SUB_INTERVAL),
|
||||
)
|
||||
|
||||
@@ -293,14 +286,14 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def _handle_restore(self) -> None:
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
await super().async_added_to_hass()
|
||||
restored_data = await self.async_get_last_sensor_data()
|
||||
if restored_data:
|
||||
if self._attr_native_unit_of_measurement is None:
|
||||
# Only restore the unit if it's not assigned from YAML
|
||||
self._attr_native_unit_of_measurement = (
|
||||
restored_data.native_unit_of_measurement
|
||||
)
|
||||
self._attr_native_unit_of_measurement = (
|
||||
restored_data.native_unit_of_measurement
|
||||
)
|
||||
try:
|
||||
self._attr_native_value = round(
|
||||
Decimal(restored_data.native_value), # type: ignore[arg-type]
|
||||
@@ -309,11 +302,6 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
|
||||
except (InvalidOperation, TypeError):
|
||||
self._attr_native_value = None
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Handle entity which will be added."""
|
||||
await super().async_added_to_hass()
|
||||
await self._handle_restore()
|
||||
|
||||
source_state = self.hass.states.get(self._sensor_source_id)
|
||||
self._derive_and_set_attributes_from_state(source_state)
|
||||
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
reload:
|
||||
@@ -58,11 +58,5 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"reload": {
|
||||
"description": "Reloads derivative sensors from the YAML-configuration.",
|
||||
"name": "[%key:common::action::reload%]"
|
||||
}
|
||||
},
|
||||
"title": "Derivative sensor"
|
||||
}
|
||||
|
||||
homeassistant/components/dominos/__init__.py (new file, 259 lines)
@@ -0,0 +1,259 @@
|
||||
"""Support for Dominos Pizza ordering."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pizzapi import Address, Customer, Order
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import http
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# The domain of your component. Should be equal to the name of your component.
|
||||
DOMAIN = "dominos"
|
||||
ENTITY_ID_FORMAT = DOMAIN + ".{}"
|
||||
|
||||
ATTR_COUNTRY = "country_code"
|
||||
ATTR_FIRST_NAME = "first_name"
|
||||
ATTR_LAST_NAME = "last_name"
|
||||
ATTR_EMAIL = "email"
|
||||
ATTR_PHONE = "phone"
|
||||
ATTR_ADDRESS = "address"
|
||||
ATTR_ORDERS = "orders"
|
||||
ATTR_SHOW_MENU = "show_menu"
|
||||
ATTR_ORDER_ENTITY = "order_entity_id"
|
||||
ATTR_ORDER_NAME = "name"
|
||||
ATTR_ORDER_CODES = "codes"
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10)
|
||||
MIN_TIME_BETWEEN_STORE_UPDATES = timedelta(minutes=3330)
|
||||
|
||||
_ORDERS_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_ORDER_NAME): cv.string,
|
||||
vol.Required(ATTR_ORDER_CODES): vol.All(cv.ensure_list, [cv.string]),
|
||||
}
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_COUNTRY): cv.string,
|
||||
vol.Required(ATTR_FIRST_NAME): cv.string,
|
||||
vol.Required(ATTR_LAST_NAME): cv.string,
|
||||
vol.Required(ATTR_EMAIL): cv.string,
|
||||
vol.Required(ATTR_PHONE): cv.string,
|
||||
vol.Required(ATTR_ADDRESS): cv.string,
|
||||
vol.Optional(ATTR_SHOW_MENU): cv.boolean,
|
||||
vol.Optional(ATTR_ORDERS, default=[]): vol.All(
|
||||
cv.ensure_list, [_ORDERS_SCHEMA]
|
||||
),
|
||||
}
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up is called when Home Assistant is loading our component."""
|
||||
dominos = Dominos(hass, config)
|
||||
|
||||
component = EntityComponent[DominosOrder](_LOGGER, DOMAIN, hass)
|
||||
hass.data[DOMAIN] = {}
|
||||
entities: list[DominosOrder] = []
|
||||
conf = config[DOMAIN]
|
||||
|
||||
hass.services.register(
|
||||
DOMAIN,
|
||||
"order",
|
||||
dominos.handle_order,
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_ORDER_ENTITY): cv.entity_ids,
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
if conf.get(ATTR_SHOW_MENU):
|
||||
hass.http.register_view(DominosProductListView(dominos))
|
||||
|
||||
for order_info in conf.get(ATTR_ORDERS):
|
||||
order = DominosOrder(order_info, dominos)
|
||||
entities.append(order)
|
||||
|
||||
component.add_entities(entities)
|
||||
|
||||
# Return boolean to indicate that initialization was successfully.
|
||||
return True
|
||||
|
||||
|
||||
class Dominos:
|
||||
"""Main Dominos service."""
|
||||
|
||||
def __init__(self, hass, config):
|
||||
"""Set up main service."""
|
||||
conf = config[DOMAIN]
|
||||
|
||||
self.hass = hass
|
||||
self.customer = Customer(
|
||||
conf.get(ATTR_FIRST_NAME),
|
||||
conf.get(ATTR_LAST_NAME),
|
||||
conf.get(ATTR_EMAIL),
|
||||
conf.get(ATTR_PHONE),
|
||||
conf.get(ATTR_ADDRESS),
|
||||
)
|
||||
self.address = Address(
|
||||
*self.customer.address.split(","), country=conf.get(ATTR_COUNTRY)
|
||||
)
|
||||
self.country = conf.get(ATTR_COUNTRY)
|
||||
try:
|
||||
self.closest_store = self.address.closest_store()
|
||||
except Exception: # noqa: BLE001
|
||||
self.closest_store = None
|
||||
|
||||
def handle_order(self, call: ServiceCall) -> None:
|
||||
"""Handle ordering pizza."""
|
||||
entity_ids = call.data[ATTR_ORDER_ENTITY]
|
||||
|
||||
target_orders = [
|
||||
order
|
||||
for order in self.hass.data[DOMAIN]["entities"]
|
||||
if order.entity_id in entity_ids
|
||||
]
|
||||
|
||||
for order in target_orders:
|
||||
order.place()
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_STORE_UPDATES)
|
||||
def update_closest_store(self):
|
||||
"""Update the shared closest store (if open)."""
|
||||
try:
|
||||
self.closest_store = self.address.closest_store()
|
||||
except Exception: # noqa: BLE001
|
||||
self.closest_store = None
|
||||
return False
|
||||
return True
|
||||
|
||||
def get_menu(self):
|
||||
"""Return the products from the closest stores menu."""
|
||||
self.update_closest_store()
|
||||
if self.closest_store is None:
|
||||
_LOGGER.warning("Cannot get menu. Store may be closed")
|
||||
return []
|
||||
menu = self.closest_store.get_menu()
|
||||
product_entries = []
|
||||
|
||||
for product in menu.products:
|
||||
item = {}
|
||||
if isinstance(product.menu_data["Variants"], list):
|
||||
variants = ", ".join(product.menu_data["Variants"])
|
||||
else:
|
||||
variants = product.menu_data["Variants"]
|
||||
item["name"] = product.name
|
||||
item["variants"] = variants
|
||||
product_entries.append(item)
|
||||
|
||||
return product_entries
|
||||
|
||||
|
||||
class DominosProductListView(http.HomeAssistantView):
|
||||
"""View to retrieve product list content."""
|
||||
|
||||
url = "/api/dominos"
|
||||
name = "api:dominos"
|
||||
|
||||
def __init__(self, dominos):
|
||||
"""Initialize suite view."""
|
||||
self.dominos = dominos
|
||||
|
||||
@callback
|
||||
def get(self, request):
|
||||
"""Retrieve if API is running."""
|
||||
return self.json(self.dominos.get_menu())
|
||||
|
||||
|
||||
class DominosOrder(Entity):
|
||||
"""Represents a Dominos order entity."""
|
||||
|
||||
def __init__(self, order_info, dominos):
|
||||
"""Set up the entity."""
|
||||
self._name = order_info["name"]
|
||||
self._product_codes = order_info["codes"]
|
||||
self._orderable = False
|
||||
self.dominos = dominos
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the orders name."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def product_codes(self):
|
||||
"""Return the orders product codes."""
|
||||
return self._product_codes
|
||||
|
||||
@property
|
||||
def orderable(self):
|
||||
"""Return the true if orderable."""
|
||||
return self._orderable
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
"""Return the state either closed, orderable or unorderable."""
|
||||
if self.dominos.closest_store is None:
|
||||
return "closed"
|
||||
return "orderable" if self._orderable else "unorderable"
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Update the order state and refreshes the store."""
|
||||
try:
|
||||
self.dominos.update_closest_store()
|
||||
except Exception: # noqa: BLE001
|
||||
self._orderable = False
|
||||
return
|
||||
|
||||
try:
|
||||
order = self.order()
|
||||
order.pay_with()
|
||||
self._orderable = True
|
||||
except Exception: # noqa: BLE001
|
||||
self._orderable = False
|
||||
|
||||
def order(self):
|
||||
"""Create the order object."""
|
||||
if self.dominos.closest_store is None:
|
||||
raise HomeAssistantError("No store available")
|
||||
|
||||
order = Order(
|
||||
self.dominos.closest_store,
|
||||
self.dominos.customer,
|
||||
self.dominos.address,
|
||||
self.dominos.country,
|
||||
)
|
||||
|
||||
for code in self._product_codes:
|
||||
order.add_item(code)
|
||||
|
||||
return order
|
||||
|
||||
def place(self):
|
||||
"""Place the order."""
|
||||
try:
|
||||
order = self.order()
|
||||
order.place()
|
||||
except Exception: # noqa: BLE001
|
||||
self._orderable = False
|
||||
_LOGGER.warning(
|
||||
"Attempted to order Dominos - Order invalid or store closed"
|
||||
)
|
||||
homeassistant/components/dominos/icons.json (new file, 7 lines)
@@ -0,0 +1,7 @@
{
  "services": {
    "order": {
      "service": "mdi:pizza"
    }
  }
}
homeassistant/components/dominos/manifest.json (new file, 11 lines)
@@ -0,0 +1,11 @@
{
  "domain": "dominos",
  "name": "Dominos Pizza",
  "codeowners": [],
  "dependencies": ["http"],
  "documentation": "https://www.home-assistant.io/integrations/dominos",
  "iot_class": "cloud_polling",
  "loggers": ["pizzapi"],
  "quality_scale": "legacy",
  "requirements": ["pizzapi==0.0.6"]
}
homeassistant/components/dominos/services.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
order:
  fields:
    order_entity_id:
      example: dominos.medium_pan
      selector:
        text:
homeassistant/components/dominos/strings.json (new file, 14 lines)
@@ -0,0 +1,14 @@
{
  "services": {
    "order": {
      "description": "Places a set of orders with Domino's Pizza.",
      "fields": {
        "order_entity_id": {
          "description": "The ID (as specified in the configuration) of an order to place. If provided as an array, all the identified orders will be placed.",
          "name": "Order entity"
        }
      },
      "name": "Order"
    }
  }
}
@@ -10,7 +10,6 @@ from typing import Any, cast
|
||||
from aiohttp import ClientSession
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
|
||||
from homeassistant.core import (
|
||||
CALLBACK_TYPE,
|
||||
@@ -19,17 +18,13 @@ from homeassistant.core import (
|
||||
ServiceCall,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import bind_hass
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import ATTR_CONFIG_ENTRY
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_TXT = "txt"
|
||||
@@ -37,13 +32,7 @@ ATTR_TXT = "txt"
|
||||
DOMAIN = "duckdns"
|
||||
|
||||
INTERVAL = timedelta(minutes=5)
|
||||
BACKOFF_INTERVALS = (
|
||||
INTERVAL,
|
||||
timedelta(minutes=1),
|
||||
timedelta(minutes=5),
|
||||
timedelta(minutes=15),
|
||||
timedelta(minutes=30),
|
||||
)
|
||||
|
||||
SERVICE_SET_TXT = "set_txt"
|
||||
|
||||
UPDATE_URL = "https://www.duckdns.org/update"
|
||||
@@ -60,112 +49,39 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
SERVICE_TXT_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_CONFIG_ENTRY): ConfigEntrySelector(
|
||||
{
|
||||
"integration": DOMAIN,
|
||||
}
|
||||
),
|
||||
vol.Optional(ATTR_TXT): vol.Any(None, cv.string),
|
||||
}
|
||||
)
|
||||
|
||||
type DuckDnsConfigEntry = ConfigEntry
|
||||
SERVICE_TXT_SCHEMA = vol.Schema({vol.Required(ATTR_TXT): vol.Any(None, cv.string)})
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Initialize the DuckDNS component."""
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SET_TXT,
|
||||
update_domain_service,
|
||||
schema=SERVICE_TXT_SCHEMA,
|
||||
)
|
||||
|
||||
if DOMAIN not in config:
|
||||
return True
|
||||
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: DuckDnsConfigEntry) -> bool:
|
||||
"""Set up Duck DNS from a config entry."""
|
||||
|
||||
domain: str = config[DOMAIN][CONF_DOMAIN]
|
||||
token: str = config[DOMAIN][CONF_ACCESS_TOKEN]
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
async def update_domain_interval(_now: datetime) -> bool:
|
||||
"""Update the DuckDNS entry."""
|
||||
return await _update_duckdns(
|
||||
session,
|
||||
entry.data[CONF_DOMAIN],
|
||||
entry.data[CONF_ACCESS_TOKEN],
|
||||
)
|
||||
return await _update_duckdns(session, domain, token)
|
||||
|
||||
entry.async_on_unload(
|
||||
async_track_time_interval_backoff(
|
||||
hass, update_domain_interval, BACKOFF_INTERVALS
|
||||
)
|
||||
intervals = (
|
||||
INTERVAL,
|
||||
timedelta(minutes=1),
|
||||
timedelta(minutes=5),
|
||||
timedelta(minutes=15),
|
||||
timedelta(minutes=30),
|
||||
)
|
||||
async_track_time_interval_backoff(hass, update_domain_interval, intervals)
|
||||
|
||||
async def update_domain_service(call: ServiceCall) -> None:
|
||||
"""Update the DuckDNS entry."""
|
||||
await _update_duckdns(session, domain, token, txt=call.data[ATTR_TXT])
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_SET_TXT, update_domain_service, schema=SERVICE_TXT_SCHEMA
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def get_config_entry(
|
||||
hass: HomeAssistant, entry_id: str | None = None
|
||||
) -> DuckDnsConfigEntry:
|
||||
"""Return config entry or raise if not found or not loaded."""
|
||||
|
||||
if entry_id is None:
|
||||
if not (config_entries := hass.config_entries.async_entries(DOMAIN)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_found",
|
||||
)
|
||||
|
||||
if len(config_entries) != 1:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_selected",
|
||||
)
|
||||
return config_entries[0]
|
||||
|
||||
if not (entry := hass.config_entries.async_get_entry(entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_found",
|
||||
)
|
||||
|
||||
return entry
|
||||
|
||||
|
||||
async def update_domain_service(call: ServiceCall) -> None:
|
||||
"""Update the DuckDNS entry."""
|
||||
|
||||
entry = get_config_entry(call.hass, call.data.get(ATTR_CONFIG_ENTRY))
|
||||
|
||||
session = async_get_clientsession(call.hass)
|
||||
|
||||
await _update_duckdns(
|
||||
session,
|
||||
entry.data[CONF_DOMAIN],
|
||||
entry.data[CONF_ACCESS_TOKEN],
|
||||
txt=call.data.get(ATTR_TXT),
|
||||
)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: DuckDnsConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return True
|
||||
|
||||
|
||||
_SENTINEL = object()
|
||||
|
||||
|
||||
|
||||
@@ -1,81 +0,0 @@
|
||||
"""Config flow for the Duck DNS integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import (
|
||||
TextSelector,
|
||||
TextSelectorConfig,
|
||||
TextSelectorType,
|
||||
)
|
||||
|
||||
from . import _update_duckdns
|
||||
from .const import DOMAIN
|
||||
from .issue import deprecate_yaml_issue
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_DOMAIN): TextSelector(
|
||||
TextSelectorConfig(type=TextSelectorType.TEXT, suffix=".duckdns.org")
|
||||
),
|
||||
vol.Required(CONF_ACCESS_TOKEN): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class DuckDnsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Duck DNS."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match({CONF_DOMAIN: user_input[CONF_DOMAIN]})
|
||||
session = async_get_clientsession(self.hass)
|
||||
try:
|
||||
if not await _update_duckdns(
|
||||
session,
|
||||
user_input[CONF_DOMAIN],
|
||||
user_input[CONF_ACCESS_TOKEN],
|
||||
):
|
||||
errors["base"] = "update_failed"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
|
||||
if not errors:
|
||||
return self.async_create_entry(
|
||||
title=f"{user_input[CONF_DOMAIN]}.duckdns.org", data=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
data_schema=STEP_USER_DATA_SCHEMA, suggested_values=user_input
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders={"url": "https://www.duckdns.org/"},
|
||||
)
|
||||
|
||||
async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Import config from yaml."""
|
||||
|
||||
self._async_abort_entries_match({CONF_DOMAIN: import_info[CONF_DOMAIN]})
|
||||
result = await self.async_step_user(import_info)
|
||||
if errors := result.get("errors"):
|
||||
deprecate_yaml_issue(self.hass, import_success=False)
|
||||
return self.async_abort(reason=errors["base"])
|
||||
|
||||
deprecate_yaml_issue(self.hass, import_success=True)
|
||||
return result
|
||||
@@ -1,7 +0,0 @@
"""Constants for the Duck DNS integration."""

from typing import Final

DOMAIN = "duckdns"

ATTR_CONFIG_ENTRY: Final = "config_entry_id"
@@ -1,40 +0,0 @@
|
||||
"""Issues for Duck DNS integration."""
|
||||
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
@callback
|
||||
def deprecate_yaml_issue(hass: HomeAssistant, *, import_success: bool) -> None:
|
||||
"""Deprecate yaml issue."""
|
||||
if import_success:
|
||||
async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_yaml_{DOMAIN}",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
breaks_in_ha_version="2026.6.0",
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Duck DNS",
|
||||
},
|
||||
)
|
||||
else:
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_yaml_import_issue_error",
|
||||
breaks_in_ha_version="2026.6.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml_import_issue_error",
|
||||
translation_placeholders={
|
||||
"url": "/config/integrations/dashboard/add?domain=duckdns"
|
||||
},
|
||||
)
|
||||
@@ -1,8 +1,8 @@
{
  "domain": "duckdns",
  "name": "Duck DNS",
  "codeowners": ["@tr4nt0r"],
  "config_flow": true,
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/duckdns",
  "iot_class": "cloud_polling"
  "iot_class": "cloud_polling",
  "quality_scale": "legacy"
}

@@ -1,10 +1,7 @@
set_txt:
  fields:
    config_entry_id:
      selector:
        config_entry:
          integration: duckdns
    txt:
      required: true
      example: "This domain name is reserved for use in documentation"
      selector:
        text:

@@ -1,48 +1,8 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
},
|
||||
"error": {
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]",
|
||||
"update_failed": "Updating Duck DNS failed"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"access_token": "Token",
|
||||
"domain": "Subdomain"
|
||||
},
|
||||
"data_description": {
|
||||
"access_token": "Your Duck DNS account token",
|
||||
"domain": "The Duck DNS subdomain to update"
|
||||
},
|
||||
"description": "Enter your Duck DNS subdomain and token below to configure dynamic DNS updates. You can find your token on the [Duck DNS]({url}) homepage after logging into your account."
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"entry_not_found": {
|
||||
"message": "Duck DNS integration entry not found"
|
||||
},
|
||||
"entry_not_selected": {
|
||||
"message": "Duck DNS integration entry not selected"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_yaml_import_issue_error": {
|
||||
"description": "Configuring Duck DNS using YAML is being removed but there was an error when trying to import the YAML configuration.\n\nEnsure the YAML configuration is correct and restart Home Assistant to try again or remove the Duck DNS YAML configuration from your `configuration.yaml` file and continue to [set up the integration]({url}) manually.",
|
||||
"title": "The Duck DNS YAML configuration import failed"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"set_txt": {
|
||||
"description": "Sets the TXT record of your Duck DNS subdomain.",
|
||||
"description": "Sets the TXT record of your DuckDNS subdomain.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"description": "The Duck DNS integration ID.",
|
||||
"name": "Integration ID"
|
||||
},
|
||||
"txt": {
|
||||
"description": "Payload for the TXT record.",
|
||||
"name": "TXT"
|
||||
|
||||
@@ -29,9 +29,9 @@ from homeassistant.const import (
|
||||
UnitOfVolumeFlowRate,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import template
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .config_flow import sensor_name
|
||||
from .const import CONF_ONLY_INCLUDE_FEEDID, FEED_ID, FEED_NAME, FEED_TAG
|
||||
@@ -267,9 +267,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity):
|
||||
self._attr_extra_state_attributes[ATTR_USERID] = elem["userid"]
|
||||
self._attr_extra_state_attributes[ATTR_LASTUPDATETIME] = elem["time"]
|
||||
self._attr_extra_state_attributes[ATTR_LASTUPDATETIMESTR] = (
|
||||
dt_util.as_local(
|
||||
dt_util.utc_from_timestamp(float(elem["time"]))
|
||||
).isoformat()
|
||||
template.timestamp_local(float(elem["time"]))
|
||||
)
|
||||
|
||||
self._attr_native_value = None
|
||||
|
||||
@@ -20,5 +20,5 @@
  "documentation": "https://www.home-assistant.io/integrations/frontend",
  "integration_type": "system",
  "quality_scale": "internal",
  "requirements": ["home-assistant-frontend==20251105.1"]
  "requirements": ["home-assistant-frontend==20251105.0"]
}

@@ -11,14 +11,11 @@ import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components.websocket_api import ActiveConnection
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import singleton
|
||||
from homeassistant.helpers.storage import Store
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
DATA_STORAGE: HassKey[dict[str, UserStore]] = HassKey("frontend_storage")
|
||||
DATA_SYSTEM_STORAGE: HassKey[SystemStore] = HassKey("frontend_system_storage")
|
||||
STORAGE_VERSION_USER_DATA = 1
|
||||
STORAGE_VERSION_SYSTEM_DATA = 1
|
||||
|
||||
|
||||
async def async_setup_frontend_storage(hass: HomeAssistant) -> None:
|
||||
@@ -26,9 +23,6 @@ async def async_setup_frontend_storage(hass: HomeAssistant) -> None:
|
||||
websocket_api.async_register_command(hass, websocket_set_user_data)
|
||||
websocket_api.async_register_command(hass, websocket_get_user_data)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_user_data)
|
||||
websocket_api.async_register_command(hass, websocket_set_system_data)
|
||||
websocket_api.async_register_command(hass, websocket_get_system_data)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_system_data)
|
||||
|
||||
|
||||
async def async_user_store(hass: HomeAssistant, user_id: str) -> UserStore:
|
||||
@@ -89,52 +83,6 @@ class _UserStore(Store[dict[str, Any]]):
|
||||
)
|
||||
|
||||
|
||||
@singleton.singleton(DATA_SYSTEM_STORAGE, async_=True)
|
||||
async def async_system_store(hass: HomeAssistant) -> SystemStore:
|
||||
"""Access the system store."""
|
||||
store = SystemStore(hass)
|
||||
await store.async_load()
|
||||
return store
|
||||
|
||||
|
||||
class SystemStore:
|
||||
"""System store for frontend data."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the system store."""
|
||||
self._store: Store[dict[str, Any]] = Store(
|
||||
hass,
|
||||
STORAGE_VERSION_SYSTEM_DATA,
|
||||
"frontend.system_data",
|
||||
)
|
||||
self.data: dict[str, Any] = {}
|
||||
self.subscriptions: dict[str, list[Callable[[], None]]] = {}
|
||||
|
||||
async def async_load(self) -> None:
|
||||
"""Load the data from the store."""
|
||||
self.data = await self._store.async_load() or {}
|
||||
|
||||
async def async_set_item(self, key: str, value: Any) -> None:
|
||||
"""Set an item and save the store."""
|
||||
self.data[key] = value
|
||||
self._store.async_delay_save(lambda: self.data, 1.0)
|
||||
for cb in self.subscriptions.get(key, []):
|
||||
cb()
|
||||
|
||||
@callback
|
||||
def async_subscribe(
|
||||
self, key: str, on_update_callback: Callable[[], None]
|
||||
) -> Callable[[], None]:
|
||||
"""Subscribe to store updates."""
|
||||
self.subscriptions.setdefault(key, []).append(on_update_callback)
|
||||
|
||||
def unsubscribe() -> None:
|
||||
"""Unsubscribe from the store."""
|
||||
self.subscriptions[key].remove(on_update_callback)
|
||||
|
||||
return unsubscribe
|
||||
|
||||
|
||||
def with_user_store(
|
||||
orig_func: Callable[
|
||||
[HomeAssistant, ActiveConnection, dict[str, Any], UserStore],
|
||||
@@ -159,28 +107,6 @@ def with_user_store(
|
||||
return with_user_store_func
|
||||
|
||||
|
||||
def with_system_store(
|
||||
orig_func: Callable[
|
||||
[HomeAssistant, ActiveConnection, dict[str, Any], SystemStore],
|
||||
Coroutine[Any, Any, None],
|
||||
],
|
||||
) -> Callable[
|
||||
[HomeAssistant, ActiveConnection, dict[str, Any]], Coroutine[Any, Any, None]
|
||||
]:
|
||||
"""Decorate function to provide system store."""
|
||||
|
||||
@wraps(orig_func)
|
||||
async def with_system_store_func(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Provide system store to function."""
|
||||
store = await async_system_store(hass)
|
||||
|
||||
await orig_func(hass, connection, msg, store)
|
||||
|
||||
return with_system_store_func
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "frontend/set_user_data",
|
||||
@@ -243,65 +169,3 @@ async def websocket_subscribe_user_data(
|
||||
connection.subscriptions[msg["id"]] = store.async_subscribe(key, on_data_update)
|
||||
on_data_update()
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "frontend/set_system_data",
|
||||
vol.Required("key"): str,
|
||||
vol.Required("value"): vol.Any(bool, str, int, float, dict, list, None),
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.async_response
|
||||
@with_system_store
|
||||
async def websocket_set_system_data(
|
||||
hass: HomeAssistant,
|
||||
connection: ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
store: SystemStore,
|
||||
) -> None:
|
||||
"""Handle set system data command."""
|
||||
await store.async_set_item(msg["key"], msg["value"])
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{vol.Required("type"): "frontend/get_system_data", vol.Required("key"): str}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@with_system_store
|
||||
async def websocket_get_system_data(
|
||||
hass: HomeAssistant,
|
||||
connection: ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
store: SystemStore,
|
||||
) -> None:
|
||||
"""Handle get system data command."""
|
||||
connection.send_result(msg["id"], {"value": store.data.get(msg["key"])})
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "frontend/subscribe_system_data",
|
||||
vol.Required("key"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@with_system_store
|
||||
async def websocket_subscribe_system_data(
|
||||
hass: HomeAssistant,
|
||||
connection: ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
store: SystemStore,
|
||||
) -> None:
|
||||
"""Handle subscribe to system data command."""
|
||||
key: str = msg["key"]
|
||||
|
||||
def on_data_update() -> None:
|
||||
"""Handle system data update."""
|
||||
connection.send_event(msg["id"], {"value": store.data.get(key)})
|
||||
|
||||
connection.subscriptions[msg["id"]] = store.async_subscribe(key, on_data_update)
|
||||
on_data_update()
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/generic",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["av==16.0.1", "Pillow==12.0.0"]
  "requirements": ["av==13.1.0", "Pillow==12.0.0"]
}

@@ -2,12 +2,10 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from secrets import token_hex
|
||||
import shutil
|
||||
|
||||
from aiohttp import BasicAuth, ClientSession, UnixConnector
|
||||
from aiohttp import ClientSession
|
||||
from aiohttp.client_exceptions import ClientConnectionError, ServerConnectionError
|
||||
from awesomeversion import AwesomeVersion
|
||||
from go2rtc_client import Go2RtcRestClient
|
||||
@@ -37,12 +35,7 @@ from homeassistant.components.camera import (
|
||||
from homeassistant.components.default_config import DOMAIN as DEFAULT_CONFIG_DOMAIN
|
||||
from homeassistant.components.stream import Orientation
|
||||
from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_PASSWORD,
|
||||
CONF_URL,
|
||||
CONF_USERNAME,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
)
|
||||
from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
|
||||
from homeassistant.helpers import (
|
||||
@@ -50,10 +43,7 @@ from homeassistant.helpers import (
|
||||
discovery_flow,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.aiohttp_client import (
|
||||
async_create_clientsession,
|
||||
async_get_clientsession,
|
||||
)
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.package import is_docker_env
|
||||
@@ -62,7 +52,6 @@ from .const import (
|
||||
CONF_DEBUG_UI,
|
||||
DEBUG_UI_URL_MESSAGE,
|
||||
DOMAIN,
|
||||
HA_MANAGED_UNIX_SOCKET,
|
||||
HA_MANAGED_URL,
|
||||
RECOMMENDED_VERSION,
|
||||
)
|
||||
@@ -71,49 +60,49 @@ from .server import Server
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_FFMPEG = "ffmpeg"
|
||||
_AUTH = "auth"
|
||||
|
||||
|
||||
def _validate_auth(config: dict) -> dict:
|
||||
"""Validate that username and password are only set when a URL is configured or when debug UI is enabled."""
|
||||
auth_exists = CONF_USERNAME in config
|
||||
debug_ui_enabled = config.get(CONF_DEBUG_UI, False)
|
||||
|
||||
if debug_ui_enabled and not auth_exists:
|
||||
raise vol.Invalid("Username and password must be set when debug_ui is true")
|
||||
|
||||
if auth_exists and CONF_URL not in config and not debug_ui_enabled:
|
||||
raise vol.Invalid(
|
||||
"Username and password can only be set when a URL is configured or debug_ui is true"
|
||||
)
|
||||
|
||||
return config
|
||||
|
||||
_SUPPORTED_STREAMS = frozenset(
|
||||
(
|
||||
"bubble",
|
||||
"dvrip",
|
||||
"expr",
|
||||
_FFMPEG,
|
||||
"gopro",
|
||||
"homekit",
|
||||
"http",
|
||||
"https",
|
||||
"httpx",
|
||||
"isapi",
|
||||
"ivideon",
|
||||
"kasa",
|
||||
"nest",
|
||||
"onvif",
|
||||
"roborock",
|
||||
"rtmp",
|
||||
"rtmps",
|
||||
"rtmpx",
|
||||
"rtsp",
|
||||
"rtsps",
|
||||
"rtspx",
|
||||
"tapo",
|
||||
"tcp",
|
||||
"webrtc",
|
||||
"webtorrent",
|
||||
)
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Exclusive(CONF_URL, DOMAIN, DEBUG_UI_URL_MESSAGE): cv.url,
|
||||
vol.Exclusive(
|
||||
CONF_DEBUG_UI, DOMAIN, DEBUG_UI_URL_MESSAGE
|
||||
): cv.boolean,
|
||||
vol.Inclusive(CONF_USERNAME, _AUTH): vol.All(
|
||||
cv.string, vol.Length(min=1)
|
||||
),
|
||||
vol.Inclusive(CONF_PASSWORD, _AUTH): vol.All(
|
||||
cv.string, vol.Length(min=1)
|
||||
),
|
||||
}
|
||||
),
|
||||
_validate_auth,
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Exclusive(CONF_URL, DOMAIN, DEBUG_UI_URL_MESSAGE): cv.url,
|
||||
vol.Exclusive(CONF_DEBUG_UI, DOMAIN, DEBUG_UI_URL_MESSAGE): cv.boolean,
|
||||
}
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
_DATA_GO2RTC: HassKey[Go2RtcConfig] = HassKey(DOMAIN)
|
||||
_DATA_GO2RTC: HassKey[str] = HassKey(DOMAIN)
|
||||
_RETRYABLE_ERRORS = (ClientConnectionError, ServerConnectionError)
|
||||
type Go2RtcConfigEntry = ConfigEntry[WebRTCProvider]
|
||||
|
||||
@@ -121,19 +110,12 @@ type Go2RtcConfigEntry = ConfigEntry[WebRTCProvider]
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up WebRTC."""
|
||||
url: str | None = None
|
||||
username: str | None = None
|
||||
password: str | None = None
|
||||
|
||||
if DOMAIN not in config and DEFAULT_CONFIG_DOMAIN not in config:
|
||||
await _remove_go2rtc_entries(hass)
|
||||
return True
|
||||
|
||||
domain_config = config.get(DOMAIN, {})
|
||||
username = domain_config.get(CONF_USERNAME)
|
||||
password = domain_config.get(CONF_PASSWORD)
|
||||
|
||||
if not (configured_by_user := DOMAIN in config) or not (
|
||||
url := domain_config.get(CONF_URL)
|
||||
url := config[DOMAIN].get(CONF_URL)
|
||||
):
|
||||
if not is_docker_env():
|
||||
if not configured_by_user:
|
||||
@@ -146,26 +128,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
_LOGGER.error("Could not find go2rtc docker binary")
|
||||
return False
|
||||
|
||||
# Generate random credentials when not provided to secure the server
|
||||
if not username or not password:
|
||||
username = token_hex()
|
||||
password = token_hex()
|
||||
_LOGGER.debug("Generated random credentials for go2rtc server")
|
||||
|
||||
auth = BasicAuth(username, password)
|
||||
# HA will manage the binary
|
||||
# Manually created session (not using the helper) needs to be closed manually
|
||||
# See on_stop listener below
|
||||
session = ClientSession(
|
||||
connector=UnixConnector(path=HA_MANAGED_UNIX_SOCKET), auth=auth
|
||||
)
|
||||
server = Server(
|
||||
hass,
|
||||
binary,
|
||||
session,
|
||||
enable_ui=domain_config.get(CONF_DEBUG_UI, False),
|
||||
username=username,
|
||||
password=password,
|
||||
hass, binary, enable_ui=config.get(DOMAIN, {}).get(CONF_DEBUG_UI, False)
|
||||
)
|
||||
try:
|
||||
await server.start()
|
||||
@@ -175,19 +140,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
async def on_stop(event: Event) -> None:
|
||||
await server.stop()
|
||||
await session.close()
|
||||
|
||||
hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, on_stop)
|
||||
|
||||
url = HA_MANAGED_URL
|
||||
elif username and password:
|
||||
# Create session with BasicAuth if credentials are provided
|
||||
auth = BasicAuth(username, password)
|
||||
session = async_create_clientsession(hass, auth=auth)
|
||||
else:
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
hass.data[_DATA_GO2RTC] = Go2RtcConfig(url, session)
|
||||
hass.data[_DATA_GO2RTC] = url
|
||||
discovery_flow.async_create_flow(
|
||||
hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={}
|
||||
)
|
||||
@@ -203,9 +161,8 @@ async def _remove_go2rtc_entries(hass: HomeAssistant) -> None:
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: Go2RtcConfigEntry) -> bool:
|
||||
"""Set up go2rtc from a config entry."""
|
||||
|
||||
config = hass.data[_DATA_GO2RTC]
|
||||
url = config.url
|
||||
session = config.session
|
||||
url = hass.data[_DATA_GO2RTC]
|
||||
session = async_get_clientsession(hass)
|
||||
client = Go2RtcRestClient(session, url)
|
||||
# Validate the server URL
|
||||
try:
|
||||
@@ -240,7 +197,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: Go2RtcConfigEntry) -> bo
|
||||
return False
|
||||
|
||||
provider = entry.runtime_data = WebRTCProvider(hass, url, session, client)
|
||||
await provider.initialize()
|
||||
entry.async_on_unload(async_register_webrtc_provider(hass, provider))
|
||||
return True
|
||||
|
||||
@@ -272,21 +228,16 @@ class WebRTCProvider(CameraWebRTCProvider):
|
||||
self._session = session
|
||||
self._rest_client = rest_client
|
||||
self._sessions: dict[str, Go2RtcWsClient] = {}
|
||||
self._supported_schemes: set[str] = set()
|
||||
|
||||
@property
|
||||
def domain(self) -> str:
|
||||
"""Return the integration domain of the provider."""
|
||||
return DOMAIN
|
||||
|
||||
async def initialize(self) -> None:
|
||||
"""Initialize the provider."""
|
||||
self._supported_schemes = await self._rest_client.schemes.list()
|
||||
|
||||
@callback
|
||||
def async_is_supported(self, stream_source: str) -> bool:
|
||||
"""Return if this provider is supports the Camera as source."""
|
||||
return stream_source.partition(":")[0] in self._supported_schemes
|
||||
return stream_source.partition(":")[0] in _SUPPORTED_STREAMS
|
||||
|
||||
async def async_handle_async_webrtc_offer(
|
||||
self,
|
||||
@@ -414,11 +365,3 @@ class WebRTCProvider(CameraWebRTCProvider):
|
||||
for ws_client in self._sessions.values():
|
||||
await ws_client.close()
|
||||
self._sessions.clear()
|
||||
|
||||
|
||||
@dataclass
|
||||
class Go2RtcConfig:
|
||||
"""Go2rtc configuration."""
|
||||
|
||||
url: str
|
||||
session: ClientSession
|
||||
|
||||
@@ -6,5 +6,4 @@ CONF_DEBUG_UI = "debug_ui"
DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time."
HA_MANAGED_API_PORT = 11984
HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
HA_MANAGED_UNIX_SOCKET = "/run/go2rtc.sock"
RECOMMENDED_VERSION = "1.9.12"
RECOMMENDED_VERSION = "1.9.11"

@@ -8,6 +8,6 @@
  "integration_type": "system",
  "iot_class": "local_polling",
  "quality_scale": "internal",
  "requirements": ["go2rtc-client==0.3.0"],
  "requirements": ["go2rtc-client==0.2.1"],
  "single_config_entry": true
}

@@ -6,13 +6,13 @@ from contextlib import suppress
|
||||
import logging
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
from aiohttp import ClientSession
|
||||
from go2rtc_client import Go2RtcRestClient
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import HA_MANAGED_API_PORT, HA_MANAGED_UNIX_SOCKET, HA_MANAGED_URL
|
||||
from .const import HA_MANAGED_API_PORT, HA_MANAGED_URL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_TERMINATE_TIMEOUT = 5
|
||||
@@ -23,30 +23,14 @@ _LOG_BUFFER_SIZE = 512
|
||||
_RESPAWN_COOLDOWN = 1
|
||||
|
||||
# Default configuration for HA
|
||||
# - Unix socket for secure local communication
|
||||
# - Basic auth enabled, including local connections
|
||||
# - HTTP API only enabled when UI is enabled
|
||||
# - Api is listening only on localhost
|
||||
# - Enable rtsp for localhost only as ffmpeg needs it
|
||||
# - Clear default ice servers
|
||||
_GO2RTC_CONFIG_FORMAT = r"""# This file is managed by Home Assistant
|
||||
# Do not edit it manually
|
||||
|
||||
app:
|
||||
modules: {app_modules}
|
||||
|
||||
api:
|
||||
listen: "{listen_config}"
|
||||
unix_listen: "{unix_socket}"
|
||||
allow_paths: {api_allow_paths}
|
||||
local_auth: true
|
||||
username: {username}
|
||||
password: {password}
|
||||
|
||||
# ffmpeg needs the exec module
|
||||
# Restrict execution to only ffmpeg binary
|
||||
exec:
|
||||
allow_paths:
|
||||
- ffmpeg
|
||||
listen: "{api_ip}:{api_port}"
|
||||
|
||||
rtsp:
|
||||
listen: "127.0.0.1:18554"
|
||||
@@ -56,43 +40,6 @@ webrtc:
|
||||
ice_servers: []
|
||||
"""
|
||||
|
||||
_APP_MODULES = (
|
||||
"api",
|
||||
"exec", # Execution module for ffmpeg
|
||||
"ffmpeg",
|
||||
"http",
|
||||
"mjpeg",
|
||||
"onvif",
|
||||
"rtmp",
|
||||
"rtsp",
|
||||
"srtp",
|
||||
"webrtc",
|
||||
"ws",
|
||||
)
|
||||
|
||||
_API_ALLOW_PATHS = (
|
||||
"/", # UI static page and version control
|
||||
"/api", # Main API path
|
||||
"/api/frame.jpeg", # Snapshot functionality
|
||||
"/api/schemes", # Supported stream schemes
|
||||
"/api/streams", # Stream management
|
||||
"/api/webrtc", # Webrtc functionality
|
||||
"/api/ws", # Websocket functionality (e.g. webrtc candidates)
|
||||
)
|
||||
|
||||
# Additional modules when UI is enabled
|
||||
_UI_APP_MODULES = (
|
||||
*_APP_MODULES,
|
||||
"debug",
|
||||
)
|
||||
# Additional api paths when UI is enabled
|
||||
_UI_API_ALLOW_PATHS = (
|
||||
*_API_ALLOW_PATHS,
|
||||
"/api/config", # UI config view
|
||||
"/api/log", # UI log view
|
||||
"/api/streams.dot", # UI network view
|
||||
)
|
||||
|
||||
_LOG_LEVEL_MAP = {
|
||||
"TRC": logging.DEBUG,
|
||||
"DBG": logging.DEBUG,
|
||||
@@ -114,40 +61,14 @@ class Go2RTCWatchdogError(HomeAssistantError):
|
||||
"""Raised on watchdog error."""
|
||||
|
||||
|
||||
def _format_list_for_yaml(items: tuple[str, ...]) -> str:
|
||||
"""Format a list of strings for yaml config."""
|
||||
if not items:
|
||||
return "[]"
|
||||
formatted_items = ",".join(f'"{item}"' for item in items)
|
||||
return f"[{formatted_items}]"
|
||||
|
||||
|
||||
def _create_temp_file(enable_ui: bool, username: str, password: str) -> str:
|
||||
def _create_temp_file(api_ip: str) -> str:
|
||||
"""Create temporary config file."""
|
||||
app_modules: tuple[str, ...] = _APP_MODULES
|
||||
api_paths: tuple[str, ...] = _API_ALLOW_PATHS
|
||||
|
||||
if enable_ui:
|
||||
app_modules = _UI_APP_MODULES
|
||||
api_paths = _UI_API_ALLOW_PATHS
|
||||
# Listen on all interfaces for allowing access from all ips
|
||||
listen_config = f":{HA_MANAGED_API_PORT}"
|
||||
else:
|
||||
# Disable HTTP listening when UI is not enabled
|
||||
# as HA does not use it.
|
||||
listen_config = ""
|
||||
|
||||
# Set delete=False to prevent the file from being deleted when the file is closed
|
||||
# Linux is clearing tmp folder on reboot, so no need to delete it manually
|
||||
with NamedTemporaryFile(prefix="go2rtc_", suffix=".yaml", delete=False) as file:
|
||||
file.write(
|
||||
_GO2RTC_CONFIG_FORMAT.format(
|
||||
listen_config=listen_config,
|
||||
unix_socket=HA_MANAGED_UNIX_SOCKET,
|
||||
app_modules=_format_list_for_yaml(app_modules),
|
||||
api_allow_paths=_format_list_for_yaml(api_paths),
|
||||
username=username,
|
||||
password=password,
|
||||
api_ip=api_ip, api_port=HA_MANAGED_API_PORT
|
||||
).encode()
|
||||
)
|
||||
return file.name
|
||||
@@ -157,25 +78,18 @@ class Server:
|
||||
"""Go2rtc server."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
binary: str,
|
||||
session: ClientSession,
|
||||
*,
|
||||
enable_ui: bool = False,
|
||||
username: str,
|
||||
password: str,
|
||||
self, hass: HomeAssistant, binary: str, *, enable_ui: bool = False
|
||||
) -> None:
|
||||
"""Initialize the server."""
|
||||
self._hass = hass
|
||||
self._binary = binary
|
||||
self._session = session
|
||||
self._enable_ui = enable_ui
|
||||
self._username = username
|
||||
self._password = password
|
||||
self._log_buffer: deque[str] = deque(maxlen=_LOG_BUFFER_SIZE)
|
||||
self._process: asyncio.subprocess.Process | None = None
|
||||
self._startup_complete = asyncio.Event()
|
||||
self._api_ip = _LOCALHOST_IP
|
||||
if enable_ui:
|
||||
# Listen on all interfaces for allowing access from all ips
|
||||
self._api_ip = ""
|
||||
self._watchdog_task: asyncio.Task | None = None
|
||||
self._watchdog_tasks: list[asyncio.Task] = []

@@ -190,7 +104,7 @@ class Server:
        """Start the server."""
        _LOGGER.debug("Starting go2rtc server")
        config_file = await self._hass.async_add_executor_job(
            _create_temp_file, self._enable_ui, self._username, self._password
            _create_temp_file, self._api_ip
        )

        self._startup_complete.clear()
@@ -219,7 +133,7 @@ class Server:
            raise Go2RTCServerStartError from err

        # Check the server version
        client = Go2RtcRestClient(self._session, HA_MANAGED_URL)
        client = Go2RtcRestClient(async_get_clientsession(self._hass), HA_MANAGED_URL)
        await client.validate_server_version()

    async def _log_output(self, process: asyncio.subprocess.Process) -> None:
@@ -291,7 +205,7 @@ class Server:

    async def _monitor_api(self) -> None:
        """Raise if the go2rtc process terminates."""
        client = Go2RtcRestClient(self._session, HA_MANAGED_URL)
        client = Go2RtcRestClient(async_get_clientsession(self._hass), HA_MANAGED_URL)

        _LOGGER.debug("Monitoring go2rtc API")
        try:
@@ -7,7 +7,6 @@
  "documentation": "https://www.home-assistant.io/integrations/google_assistant_sdk",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "quality_scale": "gold",
  "requirements": ["gassist-text==0.0.14"],
  "single_config_entry": true
}
@@ -1,98 +0,0 @@
rules:
  # Bronze
  action-setup: done
  appropriate-polling:
    status: exempt
    comment: No polling.
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions: done
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: No entities.
  entity-unique-id:
    status: exempt
    comment: No entities.
  has-entity-name:
    status: exempt
    comment: No entities.
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions: done
  config-entry-unloading: done
  docs-configuration-parameters: done
  docs-installation-parameters: done
  entity-unavailable:
    status: exempt
    comment: No entities.
  integration-owner: done
  log-when-unavailable:
    status: exempt
    comment: No entities.
  parallel-updates:
    status: exempt
    comment: No entities to update.
  reauthentication-flow: done
  test-coverage: done

  # Gold
  devices:
    status: exempt
    comment: This integration acts as a service and does not represent physical devices.
  diagnostics: done
  discovery-update-info:
    status: exempt
    comment: No discovery.
  discovery:
    status: exempt
    comment: This is a cloud service integration that cannot be discovered locally.
  docs-data-update:
    status: exempt
    comment: No entities to update.
  docs-examples: done
  docs-known-limitations: done
  docs-supported-devices: done
  docs-supported-functions: done
  docs-troubleshooting: done
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: No devices.
  entity-category:
    status: exempt
    comment: No entities.
  entity-device-class:
    status: exempt
    comment: No entities.
  entity-disabled-by-default:
    status: exempt
    comment: No entities.
  entity-translations:
    status: exempt
    comment: No entities.
  exception-translations: done
  icon-translations: done
  reconfiguration-flow: done
  repair-issues:
    status: exempt
    comment: No repairs.
  stale-devices:
    status: exempt
    comment: No devices.

  # Platinum
  async-dependency: todo
  inject-websession:
    status: exempt
    comment: The underlying library uses gRPC, not aiohttp/httpx, for communication.
  strict-typing: done
@@ -56,9 +56,6 @@
      "init": {
        "data": {
          "language_code": "Language code"
        },
        "data_description": {
          "language_code": "Language for the Google Assistant SDK requests and responses."
        }
      }
    }
@@ -31,7 +31,6 @@ from .const import DOMAIN
if TYPE_CHECKING:
    from . import GoogleSheetsConfigEntry

ADD_CREATED_COLUMN = "add_created_column"
DATA = "data"
DATA_CONFIG_ENTRY = "config_entry"
ROWS = "rows"
@@ -44,7 +43,6 @@ SHEET_SERVICE_SCHEMA = vol.All(
    {
        vol.Required(DATA_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
        vol.Optional(WORKSHEET): cv.string,
        vol.Optional(ADD_CREATED_COLUMN, default=True): cv.boolean,
        vol.Required(DATA): vol.Any(cv.ensure_list, [dict]),
    },
)
@@ -71,11 +69,10 @@ def _append_to_sheet(call: ServiceCall, entry: GoogleSheetsConfigEntry) -> None:

    worksheet = sheet.worksheet(call.data.get(WORKSHEET, sheet.sheet1.title))
    columns: list[str] = next(iter(worksheet.get_values("A1:ZZ1")), [])
    add_created_column = call.data[ADD_CREATED_COLUMN]
    now = str(datetime.now())
    rows = []
    for d in call.data[DATA]:
        row_data = ({"created": now} | d) if add_created_column else d
        row_data = {"created": now} | d
        row = [row_data.get(column, "") for column in columns]
        for key, value in row_data.items():
            if key not in columns:
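To make the behavior change above concrete, a small hedged example (the sample values are illustrative, not taken from the diff): with the optional add_created_column flag removed, the created timestamp is now always merged into each appended row.

# Illustrative row data only.
now = "2024-01-01 12:00:00"
d = {"hello": "world", "count": 5}

row_old_flag_off = d              # old behavior when add_created_column was False
row_new = {"created": now} | d    # new behavior: {'created': ..., 'hello': 'world', 'count': 5}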
@@ -9,11 +9,6 @@ append_sheet:
      example: "Sheet1"
      selector:
        text:
  add_created_column:
    required: false
    default: true
    selector:
      boolean:
  data:
    required: true
    example: '{"hello": world, "cool": True, "count": 5}'
@@ -45,10 +45,6 @@
    "append_sheet": {
      "description": "Appends data to a worksheet in Google Sheets.",
      "fields": {
        "add_created_column": {
          "description": "Add a \"created\" column with the current date-time to the appended data.",
          "name": "Add created column"
        },
        "config_entry": {
          "description": "The sheet to add data to.",
          "name": "Sheet"
@@ -97,8 +97,7 @@ SENSOR_DESCRIPTIONS = [
        key="duration",
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.DURATION,
        native_unit_of_measurement=UnitOfTime.SECONDS,
        suggested_unit_of_measurement=UnitOfTime.MINUTES,
        native_unit_of_measurement=UnitOfTime.MINUTES,
    )
]

@@ -175,7 +174,7 @@ class GoogleTravelTimeSensor(SensorEntity):
        if self._route is None:
            return None

        return self._route.duration.seconds
        return round(self._route.duration.seconds / 60)
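For clarity, a small worked example of the unit change above (the duration value is made up): the sensor previously reported raw seconds and only suggested minutes as the display unit; it now reports rounded minutes directly.

duration_seconds = 754                           # illustrative route duration from the API
duration_minutes = round(duration_seconds / 60)  # -> 13, the value the sensor now reports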

    @property
    def extra_state_attributes(self) -> dict[str, Any] | None:

@@ -20,7 +20,7 @@ from .coordinator import (
    GoogleWeatherSubEntryRuntimeData,
)

_PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.WEATHER]
_PLATFORMS: list[Platform] = [Platform.WEATHER]


async def async_setup_entry(

@@ -16,15 +16,10 @@ class GoogleWeatherBaseEntity(Entity):
    _attr_has_entity_name = True

    def __init__(
        self,
        config_entry: GoogleWeatherConfigEntry,
        subentry: ConfigSubentry,
        unique_id_suffix: str | None = None,
        self, config_entry: GoogleWeatherConfigEntry, subentry: ConfigSubentry
    ) -> None:
        """Initialize base entity."""
        self._attr_unique_id = subentry.subentry_id
        if unique_id_suffix is not None:
            self._attr_unique_id += f"_{unique_id_suffix.lower()}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, subentry.subentry_id)},
            name=subentry.title,
@@ -1,27 +0,0 @@
{
  "entity": {
    "sensor": {
      "cloud_coverage": {
        "default": "mdi:weather-cloudy"
      },
      "precipitation_probability": {
        "default": "mdi:weather-rainy"
      },
      "precipitation_qpf": {
        "default": "mdi:cup-water"
      },
      "thunderstorm_probability": {
        "default": "mdi:weather-lightning"
      },
      "uv_index": {
        "default": "mdi:weather-sunny-alert"
      },
      "visibility": {
        "default": "mdi:eye"
      },
      "weather_condition": {
        "default": "mdi:card-text-outline"
      }
    }
  }
}
Some files were not shown because too many files have changed in this diff