Compare commits

26 Commits

Author SHA1 Message Date
dependabot[bot]
8c953b0c4e Bump github/codeql-action from 4.30.8 to 4.30.9 (#154858)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-20 09:22:32 +02:00
Kinachi249
949544874f Bump PyCync to 0.4.2 (#154856) 2025-10-20 07:02:10 +02:00
Jordan Harvey
237407010a Add number platform to nintendo_parental_controls integration (#154548) 2025-10-20 07:00:29 +02:00
Manu
64e48816c7 Rename Xbox Live to Xbox Network in NextDNS (#154855) 2025-10-20 06:55:06 +02:00
Manu
6b76b3e729 Fix typos in exception translations of Xbox integration (#154849) 2025-10-20 01:09:03 +03:00
Erwin Douna
4912280193 Portainer add endpoint sensors (#154676)
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-10-19 22:19:57 +02:00
Manu
d4e72ad2cf Refactor Xbox integration setup and exception handling (#154823) 2025-10-19 22:18:56 +02:00
Thomas55555
711526fc6c Remove brackets from decorator in Husqvarna Automower (#154042) 2025-10-19 22:13:20 +02:00
Felipe Santos
4be428fce7 Set Pyright level as basic by default for VS Code (#154495) 2025-10-19 22:04:01 +02:00
asafhas
ea226806a0 Tuya Alarm-Control: Ignore low-battery warnings (#152888)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2025-10-19 22:01:59 +02:00
Whitney Young
bc77daf2ce OpenUV: Add protection window tests (#154498) 2025-10-19 21:57:26 +02:00
Benjamin Michaelis
acead56bd5 Enhance check_config script with JSON output and fail on warnings (#152575) 2025-10-19 21:55:55 +02:00
johnmschoonover
fd08c55b79 declaring typing fixes handling for agents (#154833) 2025-10-19 21:53:44 +02:00
cdnninja
0c342c4750 vesync show fan speed for smart tower fans (#154842) 2025-10-19 21:53:16 +02:00
Alex Hermann
da6986e58c Allow overriding recipients per message in XMPP (#149375)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-10-19 21:50:46 +02:00
Jan-Philipp Benecke
2f5fbc1f0e Add instance ID (mDNS) conflict detection and repair flow for zeroconf integration (#151487)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-19 21:37:10 +02:00
tronikos
e79c76cd35 Add reconfigure flow in SolarEdge (#154189) 2025-10-19 21:33:23 +02:00
Sebastian Faul
6edafd8965 Fix incorrect forward header handling (#154793) 2025-10-19 21:26:12 +02:00
Shay Levy
204ff5d45f Add valve group support (#154749)
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-10-19 21:01:15 +02:00
Anuj
591eb94515 Moved non-translatable URL out of strings.json for plex (#154826) 2025-10-19 19:49:57 +02:00
Manu
0f3de627c5 Refactor sensors and binary sensors in Xbox integration (#154719) 2025-10-19 19:49:36 +02:00
Thomas55555
b2699d8a03 Bump aioautomower to v2.3.1 (#151795) 2025-10-19 19:48:42 +02:00
Markus Adrario
769a770cf1 Code quality followup to Homee stale devices (#154741)
Co-authored-by: Abílio Costa <abmantis@users.noreply.github.com>
2025-10-19 19:42:45 +02:00
Felipe Santos
2d96e8ac4d Bump OpenRGB to Silver (#154690) 2025-10-19 19:42:20 +02:00
ElectricSteve
354cacdcae Fix pterodactyl server config link (#154758) 2025-10-19 18:18:31 +02:00
Marc Mueller
d999dd05d1 Improve bluesound conftest function (#154828) 2025-10-19 18:20:16 +03:00
81 changed files with 4461 additions and 576 deletions

View File

@@ -41,6 +41,7 @@
"python.terminal.activateEnvInCurrentTerminal": true,
"python.testing.pytestArgs": ["--no-cov"],
"pylint.importStrategy": "fromEnvironment",
"python.analysis.typeCheckingMode": "basic",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,

View File

@@ -74,6 +74,7 @@ rules:
- **Formatting**: Ruff
- **Linting**: PyLint and Ruff
- **Type Checking**: MyPy
- **Lint/Type/Format Fixes**: Always prefer addressing the underlying issue (e.g., import the typed source, update shared stubs, align with Ruff expectations, or correct formatting at the source) before disabling a rule, adding `# type: ignore`, or skipping a formatter. Treat suppressions and `noqa` comments as a last resort once no compliant fix exists
- **Testing**: pytest with plain functions and fixtures
- **Language**: American English for all code, comments, and documentation (use sentence case, including titles)

View File

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
with:
category: "/language:python"

View File

@@ -80,8 +80,6 @@ jobs:
# Add additional pip wheel build constraints
echo "PIP_CONSTRAINT=build_constraints.txt"
echo 'CFLAGS="-Wno-error=int-conversion"'
) > .env_file
- name: Write pip wheel build constraints
@@ -128,13 +126,13 @@ jobs:
core:
name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2)
if: false && github.repository_owner == 'home-assistant'
if: github.repository_owner == 'home-assistant'
needs: init
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix: &matrix-build
abi: ["cp314"]
abi: ["cp313", "cp314"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
include:
- os: ubuntu-latest
@@ -221,29 +219,9 @@ jobs:
sed -i "/uv/d" requirements.txt
sed -i "/uv/d" requirements_diff.txt
- name: Create requirements file for custom build
run: |
touch requirements_custom.txt
echo "netifaces==0.11.0" >> requirements_custom.txt
- name: Build wheels (custom)
uses: cdce8p/wheels@master
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements: "requirements_custom.txt"
verbose: true
# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: *home-assistant-wheels
if: false
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2

View File

@@ -7,6 +7,8 @@
"python.testing.pytestEnabled": false,
// https://code.visualstudio.com/docs/python/linting#_general-settings
"pylint.importStrategy": "fromEnvironment",
// Pyright is too pedantic for Home Assistant
"python.analysis.typeCheckingMode": "basic",
"json.schemas": [
{
"fileMatch": [

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"quality_scale": "bronze",
"requirements": ["pycync==0.4.1"]
"requirements": ["pycync==0.4.2"]
}

View File

@@ -72,6 +72,7 @@ PLATFORMS = [
Platform.NOTIFY,
Platform.SENSOR,
Platform.SWITCH,
Platform.VALVE,
]
_LOGGER = logging.getLogger(__name__)

View File

@@ -35,6 +35,7 @@ from .media_player import MediaPlayerGroup, async_create_preview_media_player
from .notify import async_create_preview_notify
from .sensor import async_create_preview_sensor
from .switch import async_create_preview_switch
from .valve import async_create_preview_valve
_STATISTIC_MEASURES = [
"last",
@@ -172,6 +173,7 @@ GROUP_TYPES = [
"notify",
"sensor",
"switch",
"valve",
]
@@ -253,6 +255,11 @@ CONFIG_FLOW = {
preview="group",
validate_user_input=set_group_type("switch"),
),
"valve": SchemaFlowFormStep(
basic_group_config_schema("valve"),
preview="group",
validate_user_input=set_group_type("valve"),
),
}
@@ -302,6 +309,10 @@ OPTIONS_FLOW = {
partial(light_switch_options_schema, "switch"),
preview="group",
),
"valve": SchemaFlowFormStep(
partial(basic_group_options_schema, "valve"),
preview="group",
),
}
PREVIEW_OPTIONS_SCHEMA: dict[str, vol.Schema] = {}
@@ -321,6 +332,7 @@ CREATE_PREVIEW_ENTITY: dict[
"notify": async_create_preview_notify,
"sensor": async_create_preview_sensor,
"switch": async_create_preview_switch,
"valve": async_create_preview_valve,
}

View File

@@ -16,7 +16,8 @@
"media_player": "Media player group",
"notify": "Notify group",
"sensor": "Sensor group",
"switch": "Switch group"
"switch": "Switch group",
"valve": "Valve group"
}
},
"binary_sensor": {
@@ -127,6 +128,18 @@
"data_description": {
"all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
}
},
"valve": {
"title": "[%key:component::group::config::step::user::title%]",
"data": {
"all": "[%key:component::group::config::step::binary_sensor::data::all%]",
"entities": "[%key:component::group::config::step::binary_sensor::data::entities%]",
"hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]",
"name": "[%key:common::config_flow::data::name%]"
},
"data_description": {
"all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
}
}
}
},
@@ -212,6 +225,16 @@
"data_description": {
"all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
}
},
"valve": {
"data": {
"all": "[%key:component::group::config::step::binary_sensor::data::all%]",
"entities": "[%key:component::group::config::step::binary_sensor::data::entities%]",
"hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]"
},
"data_description": {
"all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
}
}
}
},

View File

@@ -0,0 +1,262 @@
"""Platform allowing several valves to be grouped into one valve."""
from __future__ import annotations
from typing import Any
import voluptuous as vol
from homeassistant.components.valve import (
ATTR_CURRENT_POSITION,
ATTR_POSITION,
DOMAIN as VALVE_DOMAIN,
PLATFORM_SCHEMA as VALVE_PLATFORM_SCHEMA,
ValveEntity,
ValveEntityFeature,
ValveState,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
CONF_ENTITIES,
CONF_NAME,
CONF_UNIQUE_ID,
SERVICE_CLOSE_VALVE,
SERVICE_OPEN_VALVE,
SERVICE_SET_VALVE_POSITION,
SERVICE_STOP_VALVE,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant, State, callback
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
AddEntitiesCallback,
)
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .entity import GroupEntity
from .util import reduce_attribute
KEY_OPEN_CLOSE = "open_close"
KEY_STOP = "stop"
KEY_SET_POSITION = "set_position"
DEFAULT_NAME = "Valve Group"
# No limit on parallel updates to enable a group calling another group
PARALLEL_UPDATES = 0
PLATFORM_SCHEMA = VALVE_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_ENTITIES): cv.entities_domain(VALVE_DOMAIN),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
)
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Valve Group platform."""
async_add_entities(
[
ValveGroup(
config.get(CONF_UNIQUE_ID), config[CONF_NAME], config[CONF_ENTITIES]
)
]
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Initialize Valve Group config entry."""
registry = er.async_get(hass)
entities = er.async_validate_entity_ids(
registry, config_entry.options[CONF_ENTITIES]
)
async_add_entities(
[ValveGroup(config_entry.entry_id, config_entry.title, entities)]
)
@callback
def async_create_preview_valve(
hass: HomeAssistant, name: str, validated_config: dict[str, Any]
) -> ValveGroup:
"""Create a preview valve."""
return ValveGroup(
None,
name,
validated_config[CONF_ENTITIES],
)
class ValveGroup(GroupEntity, ValveEntity):
"""Representation of a ValveGroup."""
_attr_available: bool = False
_attr_current_valve_position: int | None = None
_attr_is_closed: bool | None = None
_attr_is_closing: bool | None = False
_attr_is_opening: bool | None = False
_attr_reports_position: bool = False
def __init__(self, unique_id: str | None, name: str, entities: list[str]) -> None:
"""Initialize a ValveGroup entity."""
self._entity_ids = entities
self._valves: dict[str, set[str]] = {
KEY_OPEN_CLOSE: set(),
KEY_STOP: set(),
KEY_SET_POSITION: set(),
}
self._attr_name = name
self._attr_extra_state_attributes = {ATTR_ENTITY_ID: entities}
self._attr_unique_id = unique_id
@callback
def async_update_supported_features(
self,
entity_id: str,
new_state: State | None,
) -> None:
"""Update dictionaries with supported features."""
if not new_state:
for values in self._valves.values():
values.discard(entity_id)
return
features = new_state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
if features & (ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE):
self._valves[KEY_OPEN_CLOSE].add(entity_id)
else:
self._valves[KEY_OPEN_CLOSE].discard(entity_id)
if features & (ValveEntityFeature.STOP):
self._valves[KEY_STOP].add(entity_id)
else:
self._valves[KEY_STOP].discard(entity_id)
if features & (ValveEntityFeature.SET_POSITION):
self._valves[KEY_SET_POSITION].add(entity_id)
else:
self._valves[KEY_SET_POSITION].discard(entity_id)
async def async_open_valve(self) -> None:
"""Open the valves."""
data = {ATTR_ENTITY_ID: self._valves[KEY_OPEN_CLOSE]}
await self.hass.services.async_call(
VALVE_DOMAIN, SERVICE_OPEN_VALVE, data, blocking=True, context=self._context
)
async def async_handle_open_valve(self) -> None: # type: ignore[misc]
"""Open the valves.
Override the base class to avoid calling the set position service
for all valves. Transfer the service call to the base class and let
it decide if the valve uses set position or open service.
"""
await self.async_open_valve()
async def async_close_valve(self) -> None:
"""Close valves."""
data = {ATTR_ENTITY_ID: self._valves[KEY_OPEN_CLOSE]}
await self.hass.services.async_call(
VALVE_DOMAIN,
SERVICE_CLOSE_VALVE,
data,
blocking=True,
context=self._context,
)
async def async_handle_close_valve(self) -> None: # type: ignore[misc]
"""Close the valves.
Override the base class to avoid calling the set position service
for all valves. Transfer the service call to the base class and let
it decide if the valve uses set position or close service.
"""
await self.async_close_valve()
async def async_set_valve_position(self, position: int) -> None:
"""Move the valves to a specific position."""
data = {
ATTR_ENTITY_ID: self._valves[KEY_SET_POSITION],
ATTR_POSITION: position,
}
await self.hass.services.async_call(
VALVE_DOMAIN,
SERVICE_SET_VALVE_POSITION,
data,
blocking=True,
context=self._context,
)
async def async_stop_valve(self) -> None:
"""Stop the valves."""
data = {ATTR_ENTITY_ID: self._valves[KEY_STOP]}
await self.hass.services.async_call(
VALVE_DOMAIN, SERVICE_STOP_VALVE, data, blocking=True, context=self._context
)
@callback
def async_update_group_state(self) -> None:
"""Update state and attributes."""
states = [
state
for entity_id in self._entity_ids
if (state := self.hass.states.get(entity_id)) is not None
]
# Set group as unavailable if all members are unavailable or missing
self._attr_available = any(state.state != STATE_UNAVAILABLE for state in states)
self._attr_is_closed = True
self._attr_is_closing = False
self._attr_is_opening = False
self._attr_reports_position = False
self._update_assumed_state_from_members()
for state in states:
if state.attributes.get(ATTR_CURRENT_POSITION) is not None:
self._attr_reports_position = True
if state.state == ValveState.OPEN:
self._attr_is_closed = False
continue
if state.state == ValveState.CLOSED:
continue
if state.state == ValveState.CLOSING:
self._attr_is_closing = True
continue
if state.state == ValveState.OPENING:
self._attr_is_opening = True
continue
valid_state = any(
state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) for state in states
)
if not valid_state:
# Set as unknown if all members are unknown or unavailable
self._attr_is_closed = None
self._attr_current_valve_position = reduce_attribute(
states, ATTR_CURRENT_POSITION
)
supported_features = ValveEntityFeature(0)
if self._valves[KEY_OPEN_CLOSE]:
supported_features |= ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE
if self._valves[KEY_STOP]:
supported_features |= ValveEntityFeature.STOP
if self._valves[KEY_SET_POSITION]:
supported_features |= ValveEntityFeature.SET_POSITION
self._attr_supported_features = supported_features
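
The group's aggregate position comes from `reduce_attribute`, imported from the group utilities but not shown in this diff. A minimal sketch of that kind of reduction, assuming a simple mean over the members that actually report the attribute (the helper name and rounding here are illustrative, not the shared helper's real implementation):

from statistics import mean

def reduce_position(states, attribute="current_position"):
    """Average an attribute over the members that report it; None if none do."""
    values = [
        state.attributes[attribute]
        for state in states
        if state.attributes.get(attribute) is not None
    ]
    return round(mean(values)) if values else None

Under that assumption, a group of two valves at 100 and 50 would report a position of 75.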

View File

@@ -94,13 +94,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeeConfigEntry) -> boo
for device in devices:
# Check if the device is still present in homee
device_identifiers = {identifier[1] for identifier in device.identifiers}
# homee itself uses just the uid, nodes use uid-nodeid
is_homee_hub = homee.settings.uid in device_identifiers
# homee itself uses just the uid, nodes use {uid}-{nodeid}
if homee.settings.uid in device_identifiers:
continue # Hub itself is never removed.
is_node_present = any(
f"{homee.settings.uid}-{node.id}" in device_identifiers
for node in homee.nodes
)
if not is_node_present and not is_homee_hub:
if not is_node_present:
_LOGGER.info("Removing device %s", device.name)
device_registry.async_update_device(
device_id=device.id,
@@ -110,16 +111,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeeConfigEntry) -> boo
# Remove device at runtime when node is removed in homee
async def _remove_node_callback(node: HomeeNode, add: bool) -> None:
"""Call when a node is removed."""
if not add:
device = device_registry.async_get_device(
identifiers={(DOMAIN, f"{entry.runtime_data.settings.uid}-{node.id}")}
if add:
return
device = device_registry.async_get_device(
identifiers={(DOMAIN, f"{entry.runtime_data.settings.uid}-{node.id}")}
)
if device:
_LOGGER.info("Removing device %s", device.name)
device_registry.async_update_device(
device_id=device.id,
remove_config_entry_id=entry.entry_id,
)
if device:
_LOGGER.info("Removing device %s", device.name)
device_registry.async_update_device(
device_id=device.id,
remove_config_entry_id=entry.entry_id,
)
homee.add_nodes_listener(_remove_node_callback)

View File

@@ -43,18 +43,22 @@ def async_setup_forwarded(
some proxies, for example, Kubernetes NGINX ingress, only retain one element
in the X-Forwarded-Proto header. In that case, we'll just use what we have.
`X-Forwarded-Host: <host>`
e.g., `X-Forwarded-Host: example.com`
`X-Forwarded-Host: <host1>, <host2>, <host3>`
e.g., `X-Forwarded-Host: example.com, proxy.example.com, backend.example.com`
OR `X-Forwarded-Host: example.com` (one entry, even with multiple proxies)
If the previous headers are processed successfully, and the X-Forwarded-Host is
present, it will be used.
present, the last one in the list will be used (set by the proxy nearest to the backend).
Multiple headers are valid as stated in https://www.rfc-editor.org/rfc/rfc7239#section-7.1
If multiple headers are present, they are handled according to
https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/X-Forwarded-For#parsing
> "split each X-Forwarded-For header by comma into lists and then join the lists."
Additionally:
- If no X-Forwarded-For header is found, the processing of all headers is skipped.
- Throw HTTP 400 status when untrusted connected peer provides
X-Forwarded-For headers.
- If multiple instances of X-Forwarded-For, X-Forwarded-Proto or
X-Forwarded-Host are found, an HTTP 400 status code is thrown.
- If malformed or invalid (IP) data in X-Forwarded-For header is found,
an HTTP 400 status code is thrown.
- The connected client peer on the socket of the incoming connection,
@@ -111,15 +115,12 @@ def async_setup_forwarded(
)
raise HTTPBadRequest
# Multiple X-Forwarded-For headers
if len(forwarded_for_headers) > 1:
_LOGGER.error(
"Too many headers for X-Forwarded-For: %s", forwarded_for_headers
# Process multiple X-Forwarded-For from the right side (by reversing the list)
forwarded_for_split = list(
reversed(
[addr for header in forwarded_for_headers for addr in header.split(",")]
)
raise HTTPBadRequest
# Process X-Forwarded-For from the right side (by reversing the list)
forwarded_for_split = list(reversed(forwarded_for_headers[0].split(",")))
)
try:
forwarded_for = [ip_address(addr.strip()) for addr in forwarded_for_split]
except ValueError as err:
@@ -148,14 +149,15 @@ def async_setup_forwarded(
X_FORWARDED_PROTO, []
)
if forwarded_proto_headers:
if len(forwarded_proto_headers) > 1:
_LOGGER.error(
"Too many headers for X-Forward-Proto: %s", forwarded_proto_headers
)
raise HTTPBadRequest
# Process multiple X-Forwarded-Proto from the right side (by reversing the list)
forwarded_proto_split = list(
reversed(forwarded_proto_headers[0].split(","))
reversed(
[
addr
for header in forwarded_proto_headers
for addr in header.split(",")
]
)
)
forwarded_proto = [proto.strip() for proto in forwarded_proto_split]
@@ -191,14 +193,16 @@ def async_setup_forwarded(
# Handle X-Forwarded-Host
forwarded_host_headers: list[str] = request.headers.getall(X_FORWARDED_HOST, [])
if forwarded_host_headers:
# Multiple X-Forwarded-Host headers
if len(forwarded_host_headers) > 1:
_LOGGER.error(
"Too many headers for X-Forwarded-Host: %s", forwarded_host_headers
# Process multiple X-Forwarded-Host from the right side (by reversing the list)
forwarded_host = list(
reversed(
[
addr.strip()
for header in forwarded_host_headers
for addr in header.split(",")
]
)
raise HTTPBadRequest
forwarded_host = forwarded_host_headers[0].strip()
)[0]
if not forwarded_host:
_LOGGER.error("Empty value received in X-Forward-Host header")
raise HTTPBadRequest
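
A standalone sketch of the merge rule this change adopts: split every X-Forwarded-For header on commas, join the lists, then process from the right so the entry added by the proxy closest to Home Assistant is evaluated first (an illustrative helper, not the actual middleware):

from ipaddress import ip_address

def merge_forwarded_for(headers: list[str]) -> list:
    """Merge multiple X-Forwarded-For headers and order the addresses right-to-left."""
    merged = [addr for header in headers for addr in header.split(",")]
    return [ip_address(addr.strip()) for addr in reversed(merged)]

# merge_forwarded_for(["1.2.3.4, 5.6.7.8", "9.10.11.12"])
# -> [IPv4Address('9.10.11.12'), IPv4Address('5.6.7.8'), IPv4Address('1.2.3.4')]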

View File

@@ -112,7 +112,7 @@ class AutomowerButtonEntity(AutomowerControlEntity, ButtonEntity):
self.mower_attributes
)
@handle_sending_exception()
@handle_sending_exception
async def async_press(self) -> None:
"""Send a command to the mower."""
await self.entity_description.press_fn(self.coordinator.api, self.mower_id)

View File

@@ -182,14 +182,6 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):
"Failed to listen to websocket. Trying to reconnect: %s",
err,
)
if not hass.is_stopping:
await asyncio.sleep(self.reconnect_time)
self.reconnect_time = min(self.reconnect_time * 2, MAX_WS_RECONNECT_TIME)
entry.async_create_background_task(
hass,
self.client_listen(hass, entry, automower_client),
"reconnect_task",
)
def _should_poll(self) -> bool:
"""Return True if at least one mower is connected and at least one is not OFF."""

View File

@@ -6,7 +6,7 @@ import asyncio
from collections.abc import Callable, Coroutine
import functools
import logging
from typing import TYPE_CHECKING, Any, Concatenate
from typing import TYPE_CHECKING, Any, Concatenate, ParamSpec, TypeVar, overload
from aioautomower.exceptions import ApiError
from aioautomower.model import MowerActivities, MowerAttributes, MowerStates, WorkArea
@@ -37,23 +37,42 @@ ERROR_STATES = [
]
@callback
def _work_area_translation_key(work_area_id: int, key: str) -> str:
"""Return the translation key."""
if work_area_id == 0:
return f"my_lawn_{key}"
return f"work_area_{key}"
_Entity = TypeVar("_Entity", bound="AutomowerBaseEntity")
_P = ParamSpec("_P")
type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, _P], Coroutine[Any, Any, _R]]
@overload
def handle_sending_exception(
_func: Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]]: ...
def handle_sending_exception[_Entity: AutomowerBaseEntity, **_P](
@overload
def handle_sending_exception(
*,
poll_after_sending: bool = False,
) -> Callable[[_FuncType[_Entity, _P, Any]], _FuncType[_Entity, _P, None]]:
) -> Callable[
[Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]]],
Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]],
]: ...
def handle_sending_exception(
_func: Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]] | None = None,
*,
poll_after_sending: bool = False,
) -> (
Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]]
| Callable[
[Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]]],
Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]],
]
):
"""Handle exceptions while sending a command and optionally refresh coordinator."""
def decorator(func: _FuncType[_Entity, _P, Any]) -> _FuncType[_Entity, _P, None]:
def decorator(
func: Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]]:
@functools.wraps(func)
async def wrapper(self: _Entity, *args: _P.args, **kwargs: _P.kwargs) -> None:
try:
@@ -73,7 +92,20 @@ def handle_sending_exception[_Entity: AutomowerBaseEntity, **_P](
return wrapper
return decorator
if _func is None:
# call with brackets: @handle_sending_exception(...)
return decorator
# call without brackets: @handle_sending_exception
return decorator(_func)
@callback
def _work_area_translation_key(work_area_id: int, key: str) -> str:
"""Return the translation key."""
if work_area_id == 0:
return f"my_lawn_{key}"
return f"work_area_{key}"
class AutomowerBaseEntity(CoordinatorEntity[AutomowerDataUpdateCoordinator]):
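
The overloads above exist so the decorator can be applied both bare and with keyword arguments. A minimal, untyped sketch of that dual-use pattern, assuming (as Automower entities do) that `self` exposes a coordinator with `async_request_refresh`:

import functools
import logging

_LOGGER = logging.getLogger(__name__)

def log_command_errors(_func=None, *, poll_after_sending=False):
    """Usable as @log_command_errors or @log_command_errors(poll_after_sending=True)."""

    def decorator(func):
        @functools.wraps(func)
        async def wrapper(self, *args, **kwargs):
            try:
                await func(self, *args, **kwargs)
            except Exception:  # the real decorator catches the API's specific error type
                _LOGGER.exception("Command failed")
                return
            if poll_after_sending:
                await self.coordinator.async_request_refresh()

        return wrapper

    # Bare use receives the function directly; parenthesized use returns the decorator.
    if _func is None:
        return decorator
    return decorator(_func)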

View File

@@ -135,22 +135,22 @@ class AutomowerLawnMowerEntity(AutomowerBaseEntity, LawnMowerEntity):
"""Return the work areas of the mower."""
return self.mower_attributes.work_areas
@handle_sending_exception()
@handle_sending_exception
async def async_start_mowing(self) -> None:
"""Resume schedule."""
await self.coordinator.api.commands.resume_schedule(self.mower_id)
@handle_sending_exception()
@handle_sending_exception
async def async_pause(self) -> None:
"""Pauses the mower."""
await self.coordinator.api.commands.pause_mowing(self.mower_id)
@handle_sending_exception()
@handle_sending_exception
async def async_dock(self) -> None:
"""Parks the mower until next schedule."""
await self.coordinator.api.commands.park_until_next_schedule(self.mower_id)
@handle_sending_exception()
@handle_sending_exception
async def async_override_schedule(
self, override_mode: str, duration: timedelta
) -> None:
@@ -160,7 +160,7 @@ class AutomowerLawnMowerEntity(AutomowerBaseEntity, LawnMowerEntity):
if override_mode == PARK:
await self.coordinator.api.commands.park_for(self.mower_id, duration)
@handle_sending_exception()
@handle_sending_exception
async def async_override_schedule_work_area(
self, work_area_id: int, duration: timedelta
) -> None:

View File

@@ -9,5 +9,5 @@
"iot_class": "cloud_push",
"loggers": ["aioautomower"],
"quality_scale": "silver",
"requirements": ["aioautomower==2.2.1"]
"requirements": ["aioautomower==2.3.1"]
}

View File

@@ -67,7 +67,7 @@ class AutomowerSelectEntity(AutomowerControlEntity, SelectEntity):
"""Return the current option for the entity."""
return cast(HeadlightModes, self.mower_attributes.settings.headlight.mode)
@handle_sending_exception()
@handle_sending_exception
async def async_select_option(self, option: str) -> None:
"""Change the selected option."""
await self.coordinator.api.commands.set_headlight_mode(

View File

@@ -108,12 +108,12 @@ class AutomowerScheduleSwitchEntity(AutomowerControlEntity, SwitchEntity):
"""Return the state of the switch."""
return self.mower_attributes.mower.mode != MowerModes.HOME
@handle_sending_exception()
@handle_sending_exception
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the entity off."""
await self.coordinator.api.commands.park_until_further_notice(self.mower_id)
@handle_sending_exception()
@handle_sending_exception
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
await self.coordinator.api.commands.resume_schedule(self.mower_id)

View File

@@ -320,7 +320,7 @@
"name": "Block WhatsApp"
},
"block_xboxlive": {
"name": "Block Xbox Live"
"name": "Block Xbox Network"
},
"block_youtube": {
"name": "Block YouTube"

View File

@@ -16,7 +16,12 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONF_SESSION_TOKEN, DOMAIN
from .coordinator import NintendoParentalControlsConfigEntry, NintendoUpdateCoordinator
_PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.TIME, Platform.SWITCH]
_PLATFORMS: list[Platform] = [
Platform.SENSOR,
Platform.TIME,
Platform.SWITCH,
Platform.NUMBER,
]
async def async_setup_entry(

View File

@@ -0,0 +1,91 @@
"""Number platform for Nintendo Parental controls."""
from __future__ import annotations
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from enum import StrEnum
from typing import Any
from homeassistant.components.number import (
NumberEntity,
NumberEntityDescription,
NumberMode,
)
from homeassistant.const import UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import NintendoParentalControlsConfigEntry, NintendoUpdateCoordinator
from .entity import Device, NintendoDevice
PARALLEL_UPDATES = 0
class NintendoParentalNumber(StrEnum):
"""Store keys for Nintendo Parental numbers."""
TODAY_MAX_SCREENTIME = "today_max_screentime"
@dataclass(kw_only=True, frozen=True)
class NintendoParentalControlsNumberEntityDescription(NumberEntityDescription):
"""Description for Nintendo Parental number entities."""
value_fn: Callable[[Device], int | float | None]
set_native_value_fn: Callable[[Device, float], Coroutine[Any, Any, None]]
NUMBER_DESCRIPTIONS: tuple[NintendoParentalControlsNumberEntityDescription, ...] = (
NintendoParentalControlsNumberEntityDescription(
key=NintendoParentalNumber.TODAY_MAX_SCREENTIME,
translation_key=NintendoParentalNumber.TODAY_MAX_SCREENTIME,
native_min_value=-1,
native_step=1,
native_max_value=360,
native_unit_of_measurement=UnitOfTime.MINUTES,
mode=NumberMode.BOX,
set_native_value_fn=lambda device, value: device.update_max_daily_playtime(
minutes=value
),
value_fn=lambda device: device.limit_time,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: NintendoParentalControlsConfigEntry,
async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up number platform."""
async_add_devices(
NintendoParentalControlsNumberEntity(entry.runtime_data, device, entity)
for device in entry.runtime_data.api.devices.values()
for entity in NUMBER_DESCRIPTIONS
)
class NintendoParentalControlsNumberEntity(NintendoDevice, NumberEntity):
"""Represent a Nintendo Parental number entity."""
entity_description: NintendoParentalControlsNumberEntityDescription
def __init__(
self,
coordinator: NintendoUpdateCoordinator,
device: Device,
description: NintendoParentalControlsNumberEntityDescription,
) -> None:
"""Initialize the time entity."""
super().__init__(coordinator=coordinator, device=device, key=description.key)
self.entity_description = description
@property
def native_value(self) -> float | None:
"""Return the state of the entity."""
return self.entity_description.value_fn(self._device)
async def async_set_native_value(self, value: float) -> None:
"""Update entity state."""
await self.entity_description.set_native_value_fn(self._device, value)

View File

@@ -48,6 +48,11 @@
"suspend_software": {
"name": "Suspend software"
}
},
"number": {
"today_max_screentime": {
"name": "Max screentime today"
}
}
},
"exceptions": {

View File

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/openrgb",
"integration_type": "hub",
"iot_class": "local_polling",
"quality_scale": "bronze",
"quality_scale": "silver",
"requirements": ["openrgb-python==0.3.5"]
}

View File

@@ -124,7 +124,11 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN):
return await self._async_step_plex_website_auth()
if self.show_advanced_options:
return await self.async_step_user_advanced(errors=errors)
return self.async_show_form(step_id="user", errors=errors)
return self.async_show_form(
step_id="user",
errors=errors,
description_placeholders={"plex_server_url": "[plex.tv](https://plex.tv)"},
)
async def async_step_user_advanced(
self,

View File

@@ -3,7 +3,7 @@
"flow_title": "{name} ({host})",
"step": {
"user": {
"description": "Continue to [plex.tv](https://plex.tv) to link a Plex server."
"description": "Continue to {plex_server_url} to link a Plex server."
},
"user_advanced": {
"data": {

View File

@@ -13,6 +13,7 @@ from pyportainer import (
PortainerTimeoutError,
)
from pyportainer.models.docker import DockerContainer
from pyportainer.models.docker_inspect import DockerInfo, DockerVersion
from pyportainer.models.portainer import Endpoint
from homeassistant.config_entries import ConfigEntry
@@ -38,6 +39,8 @@ class PortainerCoordinatorData:
name: str | None
endpoint: Endpoint
containers: dict[str, DockerContainer]
docker_version: DockerVersion
docker_info: DockerInfo
class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorData]]):
@@ -120,6 +123,8 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
try:
containers = await self.portainer.get_containers(endpoint.id)
docker_version = await self.portainer.docker_version(endpoint.id)
docker_info = await self.portainer.docker_info(endpoint.id)
except PortainerConnectionError as err:
_LOGGER.exception("Connection error")
raise UpdateFailed(
@@ -140,6 +145,8 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
name=endpoint.name,
endpoint=endpoint,
containers={container.id: container for container in containers},
docker_version=docker_version,
docker_info=docker_info,
)
return mapped_endpoints

View File

@@ -3,6 +3,45 @@
"sensor": {
"image": {
"default": "mdi:docker"
},
"operating_system": {
"default": "mdi:chip"
},
"operating_system_version": {
"default": "mdi:alpha-v-box"
},
"api_version": {
"default": "mdi:api"
},
"kernel_version": {
"default": "mdi:memory"
},
"architecture": {
"default": "mdi:cpu-64-bit"
},
"containers_running": {
"default": "mdi:play-circle-outline"
},
"containers_stopped": {
"default": "mdi:stop-circle-outline"
},
"containers_paused": {
"default": "mdi:pause-circle"
},
"images_count": {
"default": "mdi:image-multiple"
},
"containers_count": {
"default": "mdi:database"
},
"memory_total": {
"default": "mdi:memory"
},
"docker_version": {
"default": "mdi:docker"
},
"cpu_total": {
"default": "mdi:cpu-64-bit"
}
},
"switch": {

View File

@@ -7,28 +7,149 @@ from dataclasses import dataclass
from pyportainer.models.docker import DockerContainer
from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
from homeassistant.components.sensor import (
EntityCategory,
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
StateType,
)
from homeassistant.const import UnitOfInformation
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import PortainerConfigEntry, PortainerCoordinator
from .entity import PortainerContainerEntity, PortainerCoordinatorData
from .entity import (
PortainerContainerEntity,
PortainerCoordinatorData,
PortainerEndpointEntity,
)
@dataclass(frozen=True, kw_only=True)
class PortainerSensorEntityDescription(SensorEntityDescription):
"""Class to hold Portainer sensor description."""
class PortainerContainerSensorEntityDescription(SensorEntityDescription):
"""Class to hold Portainer container sensor description."""
value_fn: Callable[[DockerContainer], str | None]
value_fn: Callable[[DockerContainer], StateType]
CONTAINER_SENSORS: tuple[PortainerSensorEntityDescription, ...] = (
PortainerSensorEntityDescription(
@dataclass(frozen=True, kw_only=True)
class PortainerEndpointSensorEntityDescription(SensorEntityDescription):
"""Class to hold Portainer endpoint sensor description."""
value_fn: Callable[[PortainerCoordinatorData], StateType]
CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
PortainerContainerSensorEntityDescription(
key="image",
translation_key="image",
value_fn=lambda data: data.image,
),
)
ENDPOINT_SENSORS: tuple[PortainerEndpointSensorEntityDescription, ...] = (
PortainerEndpointSensorEntityDescription(
key="api_version",
translation_key="api_version",
value_fn=lambda data: data.docker_version.api_version,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
PortainerEndpointSensorEntityDescription(
key="kernel_version",
translation_key="kernel_version",
value_fn=lambda data: data.docker_version.kernel_version,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
PortainerEndpointSensorEntityDescription(
key="operating_system",
translation_key="operating_system",
value_fn=lambda data: data.docker_info.os_type,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
PortainerEndpointSensorEntityDescription(
key="operating_system_version",
translation_key="operating_system_version",
value_fn=lambda data: data.docker_info.os_version,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
PortainerEndpointSensorEntityDescription(
key="docker_version",
translation_key="docker_version",
value_fn=lambda data: data.docker_info.server_version,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
PortainerEndpointSensorEntityDescription(
key="architecture",
translation_key="architecture",
value_fn=lambda data: data.docker_info.architecture,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
PortainerEndpointSensorEntityDescription(
key="containers_count",
translation_key="containers_count",
value_fn=lambda data: data.docker_info.containers,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
PortainerEndpointSensorEntityDescription(
key="containers_running",
translation_key="containers_running",
value_fn=lambda data: data.docker_info.containers_running,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
PortainerEndpointSensorEntityDescription(
key="containers_stopped",
translation_key="containers_stopped",
value_fn=lambda data: data.docker_info.containers_stopped,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
PortainerEndpointSensorEntityDescription(
key="containers_paused",
translation_key="containers_paused",
value_fn=lambda data: data.docker_info.containers_paused,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
PortainerEndpointSensorEntityDescription(
key="images_count",
translation_key="images_count",
value_fn=lambda data: data.docker_info.images,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
PortainerEndpointSensorEntityDescription(
key="memory_total",
translation_key="memory_total",
value_fn=lambda data: data.docker_info.mem_total,
device_class=SensorDeviceClass.DATA_SIZE,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=UnitOfInformation.BYTES,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
PortainerEndpointSensorEntityDescription(
key="cpu_total",
translation_key="cpu_total",
value_fn=lambda data: data.docker_info.ncpu,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
)
async def async_setup_entry(
@@ -38,29 +159,41 @@ async def async_setup_entry(
) -> None:
"""Set up Portainer sensors based on a config entry."""
coordinator = entry.runtime_data
entities: list[SensorEntity] = []
async_add_entities(
PortainerContainerSensor(
coordinator,
entity_description,
container,
endpoint,
for endpoint in coordinator.data.values():
entities.extend(
PortainerEndpointSensor(
coordinator,
entity_description,
endpoint,
)
for entity_description in ENDPOINT_SENSORS
)
for endpoint in coordinator.data.values()
for container in endpoint.containers.values()
for entity_description in CONTAINER_SENSORS
)
entities.extend(
PortainerContainerSensor(
coordinator,
entity_description,
container,
endpoint,
)
for container in endpoint.containers.values()
for entity_description in CONTAINER_SENSORS
)
async_add_entities(entities)
class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
"""Representation of a Portainer container sensor."""
entity_description: PortainerSensorEntityDescription
entity_description: PortainerContainerSensorEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerSensorEntityDescription,
entity_description: PortainerContainerSensorEntityDescription,
device_info: DockerContainer,
via_device: PortainerCoordinatorData,
) -> None:
@@ -76,8 +209,37 @@ class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
return super().available and self.endpoint_id in self.coordinator.data
@property
def native_value(self) -> str | None:
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.entity_description.value_fn(
self.coordinator.data[self.endpoint_id].containers[self.device_id]
)
class PortainerEndpointSensor(PortainerEndpointEntity, SensorEntity):
"""Representation of a Portainer endpoint sensor."""
entity_description: PortainerEndpointSensorEntityDescription
def __init__(
self,
coordinator: PortainerCoordinator,
entity_description: PortainerEndpointSensorEntityDescription,
device_info: PortainerCoordinatorData,
) -> None:
"""Initialize the Portainer endpoint sensor."""
self.entity_description = entity_description
super().__init__(device_info, coordinator)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
@property
def available(self) -> bool:
"""Return if the device is available."""
return super().available and self.device_id in self.coordinator.data
@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""
endpoint_data = self.coordinator.data[self._device_info.endpoint.id]
return self.entity_description.value_fn(endpoint_data)

View File

@@ -49,6 +49,45 @@
"sensor": {
"image": {
"name": "Image"
},
"operating_system": {
"name": "Operating system"
},
"operating_system_version": {
"name": "Operating system version"
},
"api_version": {
"name": "API version"
},
"kernel_version": {
"name": "Kernel version"
},
"architecture": {
"name": "Architecture"
},
"containers_running": {
"name": "Containers running"
},
"containers_stopped": {
"name": "Containers stopped"
},
"containers_paused": {
"name": "Containers paused"
},
"images_count": {
"name": "Image count"
},
"containers_count": {
"name": "Container count"
},
"memory_total": {
"name": "Total memory"
},
"docker_version": {
"name": "Docker version"
},
"cpu_total": {
"name": "Total CPU"
}
},
"switch": {

View File

@@ -1,5 +1,7 @@
"""Base entity for the Pterodactyl integration."""
from yarl import URL
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_URL
from homeassistant.helpers.device_registry import DeviceInfo
@@ -33,7 +35,9 @@ class PterodactylEntity(CoordinatorEntity[PterodactylCoordinator]):
name=self.game_server_data.name,
model=self.game_server_data.name,
model_id=self.game_server_data.uuid,
configuration_url=f"{config_entry.data[CONF_URL]}/server/{identifier}",
configuration_url=str(
URL(config_entry.data[CONF_URL]) / "server" / identifier
),
)
@property
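
The move to yarl's `/` operator avoids the doubled slash an f-string produces when the configured panel URL ends with one. A quick illustration with made-up values:

from yarl import URL

base = "https://panel.example.com/"  # hypothetical CONF_URL value with a trailing slash
identifier = "a1b2c3d4"              # hypothetical server identifier

print(f"{base}/server/{identifier}")           # https://panel.example.com//server/a1b2c3d4
print(str(URL(base) / "server" / identifier))  # https://panel.example.com/server/a1b2c3d4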

View File

@@ -3,14 +3,18 @@
from __future__ import annotations
import socket
from typing import Any
from typing import TYPE_CHECKING, Any
from aiohttp import ClientError, ClientResponseError
import aiosolaredge
from solaredge_web import SolarEdgeWeb
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import (
SOURCE_RECONFIGURE,
ConfigFlow,
ConfigFlowResult,
)
from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.data_entry_flow import section
@@ -91,17 +95,28 @@ class SolarEdgeConfigFlow(ConfigFlow, domain=DOMAIN):
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Step when user initializes a integration."""
"""Step when user initializes an integration or reconfigures it."""
self._errors = {}
entry = None
if self.source == SOURCE_RECONFIGURE:
entry = self._get_reconfigure_entry()
if user_input is not None:
name = slugify(user_input.get(CONF_NAME, DEFAULT_NAME))
site_id = user_input[CONF_SITE_ID]
if self.source == SOURCE_RECONFIGURE:
if TYPE_CHECKING:
assert entry
site_id = entry.data[CONF_SITE_ID]
else:
site_id = user_input[CONF_SITE_ID]
api_auth = user_input.get(CONF_SECTION_API_AUTH, {})
web_auth = user_input.get(CONF_SECTION_WEB_AUTH, {})
api_key = api_auth.get(CONF_API_KEY)
username = web_auth.get(CONF_USERNAME)
if self._site_in_configuration_exists(site_id):
if self.source != SOURCE_RECONFIGURE and self._site_in_configuration_exists(
site_id
):
self._errors[CONF_SITE_ID] = "already_configured"
elif not api_key and not username:
self._errors["base"] = "auth_missing"
@@ -120,54 +135,92 @@ class SolarEdgeConfigFlow(ConfigFlow, domain=DOMAIN):
data = {CONF_SITE_ID: site_id}
data.update(api_auth)
data.update(web_auth)
if self.source == SOURCE_RECONFIGURE:
if TYPE_CHECKING:
assert entry
return self.async_update_reload_and_abort(entry, data=data)
return self.async_create_entry(title=name, data=data)
elif self.source == SOURCE_RECONFIGURE:
if TYPE_CHECKING:
assert entry
user_input = {
CONF_SECTION_API_AUTH: {CONF_API_KEY: entry.data.get(CONF_API_KEY, "")},
CONF_SECTION_WEB_AUTH: {
CONF_USERNAME: entry.data.get(CONF_USERNAME, ""),
CONF_PASSWORD: entry.data.get(CONF_PASSWORD, ""),
},
}
else:
user_input = {}
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(
CONF_NAME, default=user_input.get(CONF_NAME, DEFAULT_NAME)
): str,
vol.Required(
CONF_SITE_ID, default=user_input.get(CONF_SITE_ID, "")
): str,
vol.Optional(CONF_SECTION_API_AUTH): section(
vol.Schema(
{
vol.Optional(
CONF_API_KEY,
default=user_input.get(
CONF_SECTION_API_AUTH, {}
).get(CONF_API_KEY, ""),
): str,
}
),
options={"collapsed": False},
data_schema_dict: dict[vol.Marker, Any] = {}
if self.source != SOURCE_RECONFIGURE:
data_schema_dict[
vol.Required(CONF_NAME, default=user_input.get(CONF_NAME, DEFAULT_NAME))
] = str
data_schema_dict[
vol.Required(CONF_SITE_ID, default=user_input.get(CONF_SITE_ID, ""))
] = str
data_schema_dict.update(
{
vol.Optional(CONF_SECTION_API_AUTH): section(
vol.Schema(
{
vol.Optional(
CONF_API_KEY,
default=user_input.get(CONF_SECTION_API_AUTH, {}).get(
CONF_API_KEY, ""
),
): str,
}
),
vol.Optional(CONF_SECTION_WEB_AUTH): section(
vol.Schema(
{
vol.Inclusive(
CONF_USERNAME,
"web_account",
default=user_input.get(
CONF_SECTION_WEB_AUTH, {}
).get(CONF_USERNAME, ""),
): str,
vol.Inclusive(
CONF_PASSWORD,
"web_account",
default=user_input.get(
CONF_SECTION_WEB_AUTH, {}
).get(CONF_PASSWORD, ""),
): str,
}
),
options={"collapsed": False},
options={"collapsed": False},
),
vol.Optional(CONF_SECTION_WEB_AUTH): section(
vol.Schema(
{
vol.Inclusive(
CONF_USERNAME,
"web_account",
default=user_input.get(CONF_SECTION_WEB_AUTH, {}).get(
CONF_USERNAME, ""
),
): str,
vol.Inclusive(
CONF_PASSWORD,
"web_account",
default=user_input.get(CONF_SECTION_WEB_AUTH, {}).get(
CONF_PASSWORD, ""
),
): str,
}
),
}
),
errors=self._errors,
options={"collapsed": False},
),
}
)
data_schema = vol.Schema(data_schema_dict)
step_id = "user"
description_placeholders = {}
if self.source == SOURCE_RECONFIGURE:
if TYPE_CHECKING:
assert entry
step_id = "reconfigure"
description_placeholders["site_id"] = entry.data[CONF_SITE_ID]
return self.async_show_form(
step_id=step_id,
data_schema=data_schema,
errors=self._errors,
description_placeholders=description_placeholders,
)
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a reconfiguration flow initiated by the user."""
return await self.async_step_user(user_input)
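
Stripped of the SolarEdge schema details, this is the standard Home Assistant reconfigure idiom: the reconfigure step reuses the user step, which branches on `self.source`. A rough sketch with a hypothetical domain and an empty form, not the integration's exact code:

from typing import Any

import voluptuous as vol

from homeassistant.config_entries import SOURCE_RECONFIGURE, ConfigFlow, ConfigFlowResult


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Share one form between initial setup and reconfiguration."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            if self.source == SOURCE_RECONFIGURE:
                # Update the existing entry, reload it and abort this flow.
                return self.async_update_reload_and_abort(
                    self._get_reconfigure_entry(), data=user_input
                )
            return self.async_create_entry(title="Example", data=user_input)
        return self.async_show_form(step_id="user", data_schema=vol.Schema({}))

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Reuse the user step for reconfiguration."""
        return await self.async_step_user(user_input)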

View File

@@ -33,6 +33,37 @@
}
}
}
},
"reconfigure": {
"title": "Reconfigure SolarEdge",
"description": "Update your API key or web account credentials for site {site_id}.",
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"api_key": "[%key:component::solaredge::config::step::user::data_description::api_key%]",
"username": "[%key:component::solaredge::config::step::user::data_description::username%]",
"password": "[%key:component::solaredge::config::step::user::data_description::password%]"
},
"sections": {
"api_auth": {
"name": "[%key:component::solaredge::config::step::user::sections::api_auth::name%]",
"description": "[%key:component::solaredge::config::step::user::sections::api_auth::description%]",
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
}
},
"web_auth": {
"name": "[%key:component::solaredge::config::step::user::sections::web_auth::name%]",
"description": "[%key:component::solaredge::config::step::user::sections::web_auth::description%]",
"data": {
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
}
}
}
}
},
"error": {
@@ -45,7 +76,8 @@
"auth_missing": "You must provide either an API key or a username and password."
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
}
},
"entity": {

View File

@@ -149,7 +149,11 @@ class TuyaAlarmEntity(TuyaEntity, AlarmControlPanelEntity):
self._master_state is not None
and self.device.status.get(self._master_state.dpcode) == State.ALARM
):
return AlarmControlPanelState.TRIGGERED
# Only report as triggered if NOT a battery warning
if (
changed_by := self.changed_by
) is None or "Sensor Low Battery" not in changed_by:
return AlarmControlPanelState.TRIGGERED
if not (status := self.device.status.get(self.entity_description.key)):
return None

View File

@@ -115,7 +115,10 @@ class VeSyncFanHA(VeSyncBaseEntity, FanEntity):
"""Return the currently set speed."""
current_level = self.device.state.fan_level
if self.device.state.mode == VS_FAN_MODE_MANUAL and current_level is not None:
if (
self.device.state.mode in (VS_FAN_MODE_MANUAL, VS_FAN_MODE_NORMAL)
and current_level is not None
):
if current_level == 0:
return 0
return ordered_list_item_to_percentage(
@@ -207,7 +210,7 @@ class VeSyncFanHA(VeSyncBaseEntity, FanEntity):
)
# Switch to manual mode if not already set
if self.device.state.mode != VS_FAN_MODE_MANUAL:
if self.device.state.mode not in (VS_FAN_MODE_MANUAL, VS_FAN_MODE_NORMAL):
if not await self.device.set_manual_mode():
raise HomeAssistantError(
"An error occurred while setting manual mode."

View File

@@ -4,15 +4,10 @@ from __future__ import annotations
import logging
from xbox.webapi.api.client import XboxLiveClient
from xbox.webapi.api.provider.smartglass.models import SmartglassConsoleList
from xbox.webapi.common.signed_session import SignedSession
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv
from homeassistant.helpers import config_validation as cv
from . import api
from .const import DOMAIN
from .coordinator import XboxConfigEntry, XboxUpdateCoordinator
@@ -30,24 +25,8 @@ PLATFORMS = [
async def async_setup_entry(hass: HomeAssistant, entry: XboxConfigEntry) -> bool:
"""Set up xbox from a config entry."""
implementation = (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, entry
)
)
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
signed_session = await hass.async_add_executor_job(SignedSession)
auth = api.AsyncConfigEntryAuth(signed_session, session)
client = XboxLiveClient(auth)
consoles: SmartglassConsoleList = await client.smartglass.get_console_list()
_LOGGER.debug(
"Found %d consoles: %s",
len(consoles.result),
consoles.model_dump(),
)
coordinator = XboxUpdateCoordinator(hass, entry, client, consoles)
coordinator = XboxUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator

View File

@@ -33,6 +33,6 @@ class AsyncConfigEntryAuth(AuthenticationManager):
tokens = {**self._oauth_session.token}
issued = tokens["expires_at"] - tokens["expires_in"]
del tokens["expires_at"]
token_response = OAuth2TokenResponse.parse_obj(tokens)
token_response = OAuth2TokenResponse.model_validate(tokens)
token_response.issued = utc_from_timestamp(issued)
return token_response
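
`parse_obj` is the pydantic v1 spelling; pydantic v2 renamed it to `model_validate` and only keeps the old name as a deprecated alias. The same call on a stand-in model (not the real `OAuth2TokenResponse`):

from pydantic import BaseModel

class TokenResponse(BaseModel):  # stand-in model for illustration
    access_token: str
    expires_in: int

data = {"access_token": "abc", "expires_in": 3600}
token = TokenResponse.model_validate(data)  # pydantic v2
# token = TokenResponse.parse_obj(data)     # v1 spelling, deprecated in v2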

View File

@@ -2,17 +2,84 @@
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum
from functools import partial
from homeassistant.components.binary_sensor import BinarySensorEntity
from yarl import URL
from homeassistant.components.binary_sensor import (
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import XboxConfigEntry, XboxUpdateCoordinator
from .coordinator import PresenceData, XboxConfigEntry, XboxUpdateCoordinator
from .entity import XboxBaseEntity
PRESENCE_ATTRIBUTES = ["online", "in_party", "in_game", "in_multiplayer"]
class XboxBinarySensor(StrEnum):
"""Xbox binary sensor."""
ONLINE = "online"
IN_PARTY = "in_party"
IN_GAME = "in_game"
IN_MULTIPLAYER = "in_multiplayer"
@dataclass(kw_only=True, frozen=True)
class XboxBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Xbox binary sensor description."""
is_on_fn: Callable[[PresenceData], bool | None]
entity_picture_fn: Callable[[PresenceData], str | None] | None = None
def profile_pic(data: PresenceData) -> str | None:
"""Return the gamer pic."""
# Xbox sometimes returns a domain that uses a wrong certificate which
# creates issues with loading the image.
# The correct domain is images-eds-ssl which can just be replaced
# to point to the correct image, with the correct domain and certificate.
# We need to also remove the 'mode=Padding' query because with it,
# it results in an error 400.
url = URL(data.display_pic)
if url.host == "images-eds.xboxlive.com":
url = url.with_host("images-eds-ssl.xboxlive.com").with_scheme("https")
query = dict(url.query)
query.pop("mode", None)
return str(url.with_query(query))
SENSOR_DESCRIPTIONS: tuple[XboxBinarySensorEntityDescription, ...] = (
XboxBinarySensorEntityDescription(
key=XboxBinarySensor.ONLINE,
translation_key=XboxBinarySensor.ONLINE,
is_on_fn=lambda x: x.online,
name=None,
entity_picture_fn=profile_pic,
),
XboxBinarySensorEntityDescription(
key=XboxBinarySensor.IN_PARTY,
translation_key=XboxBinarySensor.IN_PARTY,
is_on_fn=lambda x: x.in_party,
entity_registry_enabled_default=False,
),
XboxBinarySensorEntityDescription(
key=XboxBinarySensor.IN_GAME,
translation_key=XboxBinarySensor.IN_GAME,
is_on_fn=lambda x: x.in_game,
),
XboxBinarySensorEntityDescription(
key=XboxBinarySensor.IN_MULTIPLAYER,
translation_key=XboxBinarySensor.IN_MULTIPLAYER,
is_on_fn=lambda x: x.in_multiplayer,
entity_registry_enabled_default=False,
),
)
async def async_setup_entry(
@@ -33,13 +100,23 @@ async def async_setup_entry(
class XboxBinarySensorEntity(XboxBaseEntity, BinarySensorEntity):
"""Representation of a Xbox presence state."""
@property
def is_on(self) -> bool:
"""Return the status of the requested attribute."""
if not self.coordinator.last_update_success:
return False
entity_description: XboxBinarySensorEntityDescription
return getattr(self.data, self.attribute, False)
@property
def is_on(self) -> bool | None:
"""Return the status of the requested attribute."""
return self.entity_description.is_on_fn(self.data)
@property
def entity_picture(self) -> str | None:
"""Return the gamer pic."""
return (
fn(self.data)
if (fn := self.entity_description.entity_picture_fn) is not None
else super().entity_picture
)
@callback
@@ -56,29 +133,13 @@ def async_update_friends(
new_entities: list[XboxBinarySensorEntity] = []
for xuid in new_ids - current_ids:
current[xuid] = [
XboxBinarySensorEntity(coordinator, xuid, attribute)
for attribute in PRESENCE_ATTRIBUTES
XboxBinarySensorEntity(coordinator, xuid, description)
for description in SENSOR_DESCRIPTIONS
]
new_entities = new_entities + current[xuid]
async_add_entities(new_entities)
if new_entities:
async_add_entities(new_entities)
# Process deleted favorites, remove them from Home Assistant
for xuid in current_ids - new_ids:
coordinator.hass.async_create_task(
async_remove_entities(xuid, coordinator, current)
)
async def async_remove_entities(
xuid: str,
coordinator: XboxUpdateCoordinator,
current: dict[str, list[XboxBinarySensorEntity]],
) -> None:
"""Remove friend sensors from Home Assistant."""
registry = er.async_get(coordinator.hass)
entities = current[xuid]
for entity in entities:
if entity.entity_id in registry.entities:
registry.async_remove(entity.entity_id)
del current[xuid]

View File

@@ -2,10 +2,11 @@
from __future__ import annotations
from dataclasses import dataclass
from dataclasses import dataclass, field
from datetime import timedelta
import logging
from httpx import HTTPStatusError, RequestError, TimeoutException
from xbox.webapi.api.client import XboxLiveClient
from xbox.webapi.api.provider.catalog.const import SYSTEM_PFN_ID_MAP
from xbox.webapi.api.provider.catalog.models import AlternateIdType, Product
@@ -18,11 +19,15 @@ from xbox.webapi.api.provider.smartglass.models import (
SmartglassConsoleList,
SmartglassConsoleStatus,
)
from xbox.webapi.common.signed_session import SignedSession
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_entry_oauth2_flow, device_registry as dr
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from . import api
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
@@ -59,21 +64,21 @@ class PresenceData:
class XboxData:
"""Xbox dataclass for update coordinator."""
consoles: dict[str, ConsoleData]
presence: dict[str, PresenceData]
consoles: dict[str, ConsoleData] = field(default_factory=dict)
presence: dict[str, PresenceData] = field(default_factory=dict)
class XboxUpdateCoordinator(DataUpdateCoordinator[XboxData]):
"""Store Xbox Console Status."""
config_entry: ConfigEntry
consoles: SmartglassConsoleList
client: XboxLiveClient
def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
client: XboxLiveClient,
consoles: SmartglassConsoleList,
) -> None:
"""Initialize."""
super().__init__(
@@ -83,9 +88,51 @@ class XboxUpdateCoordinator(DataUpdateCoordinator[XboxData]):
name=DOMAIN,
update_interval=timedelta(seconds=10),
)
self.data = XboxData({}, {})
self.client: XboxLiveClient = client
self.consoles: SmartglassConsoleList = consoles
self.data = XboxData()
self.current_friends: set[str] = set()
async def _async_setup(self) -> None:
"""Set up coordinator."""
try:
implementation = (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
self.hass, self.config_entry
)
)
except ValueError as e:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="request_exception",
translation_placeholders={"error": str(e)},
) from e
session = config_entry_oauth2_flow.OAuth2Session(
self.hass, self.config_entry, implementation
)
signed_session = await self.hass.async_add_executor_job(SignedSession)
auth = api.AsyncConfigEntryAuth(signed_session, session)
self.client = XboxLiveClient(auth)
try:
self.consoles = await self.client.smartglass.get_console_list()
except TimeoutException as e:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="timeout_exception",
) from e
except (RequestError, HTTPStatusError) as e:
_LOGGER.debug("Xbox exception:", exc_info=True)
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="request_exception",
translation_placeholders={"error": str(e)},
) from e
_LOGGER.debug(
"Found %d consoles: %s",
len(self.consoles.result),
self.consoles.model_dump(),
)
async def _async_update_data(self) -> XboxData:
"""Fetch the latest console status."""
@@ -100,7 +147,7 @@ class XboxUpdateCoordinator(DataUpdateCoordinator[XboxData]):
_LOGGER.debug(
"%s status: %s",
console.name,
status.dict(),
status.model_dump(),
)
# Setup focus app
@@ -147,8 +194,33 @@ class XboxUpdateCoordinator(DataUpdateCoordinator[XboxData]):
presence_data[friend.xuid] = _build_presence_data(friend)
if (
self.current_friends
- (new_friends := {x.xuid for x in presence_data.values()})
or not self.current_friends
):
self.remove_stale_devices(presence_data)
self.current_friends = new_friends
return XboxData(new_console_data, presence_data)
def remove_stale_devices(self, presence_data: dict[str, PresenceData]) -> None:
"""Remove stale devices from registry."""
device_reg = dr.async_get(self.hass)
identifiers = {(DOMAIN, person.xuid) for person in presence_data.values()} | {
(DOMAIN, console.id) for console in self.consoles.result
}
for device in dr.async_entries_for_config_entry(
device_reg, self.config_entry.entry_id
):
if not set(device.identifiers) & identifiers:
_LOGGER.debug("Removing stale device %s", device.name)
device_reg.async_update_device(
device.id, remove_config_entry_id=self.config_entry.entry_id
)
def _build_presence_data(person: Person) -> PresenceData:
"""Build presence data from a person."""

View File

@@ -2,9 +2,8 @@
from __future__ import annotations
from yarl import URL
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
@@ -14,55 +13,30 @@ from .coordinator import PresenceData, XboxUpdateCoordinator
class XboxBaseEntity(CoordinatorEntity[XboxUpdateCoordinator]):
"""Base Sensor for the Xbox Integration."""
_attr_has_entity_name = True
def __init__(
self, coordinator: XboxUpdateCoordinator, xuid: str, attribute: str
self,
coordinator: XboxUpdateCoordinator,
xuid: str,
entity_description: EntityDescription,
) -> None:
"""Initialize Xbox binary sensor."""
super().__init__(coordinator)
self.xuid = xuid
self.attribute = attribute
self._attr_unique_id = f"{xuid}_{attribute}"
self._attr_entity_registry_enabled_default = attribute == "online"
self.entity_description = entity_description
self._attr_unique_id = f"{xuid}_{entity_description.key}"
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, "xbox_live")},
identifiers={(DOMAIN, xuid)},
manufacturer="Microsoft",
model="Xbox Live",
name="Xbox Live",
model="Xbox Network",
name=self.data.gamertag,
)
@property
def data(self) -> PresenceData | None:
def data(self) -> PresenceData:
"""Return coordinator data for this console."""
return self.coordinator.data.presence.get(self.xuid)
@property
def name(self) -> str | None:
"""Return the name of the sensor."""
if not self.data:
return None
if self.attribute == "online":
return self.data.gamertag
attr_name = " ".join([part.title() for part in self.attribute.split("_")])
return f"{self.data.gamertag} {attr_name}"
@property
def entity_picture(self) -> str | None:
"""Return the gamer pic."""
if not self.data:
return None
# Xbox sometimes returns a domain that uses a wrong certificate which
# creates issues with loading the image.
# The correct domain is images-eds-ssl which can just be replaced
# to point to the correct image, with the correct domain and certificate.
# We need to also remove the 'mode=Padding' query because with it,
# it results in an error 400.
url = URL(self.data.display_pic)
if url.host == "images-eds.xboxlive.com":
url = url.with_host("images-eds-ssl.xboxlive.com").with_scheme("https")
query = dict(url.query)
query.pop("mode", None)
return str(url.with_query(query))
return self.coordinator.data.presence[self.xuid]

View File

@@ -0,0 +1,32 @@
{
"entity": {
"sensor": {
"status": {
"default": "mdi:message-text-outline"
},
"gamer_score": {
"default": "mdi:alpha-g-circle"
},
"account_tier": {
"default": "mdi:microsoft-xbox"
},
"gold_tenure": {
"default": "mdi:microsoft-xbox"
}
},
"binary_sensor": {
"online": {
"default": "mdi:account"
},
"in_party": {
"default": "mdi:account-group"
},
"in_game": {
"default": "mdi:microsoft-xbox-controller"
},
"in_multiplayer": {
"default": "mdi:account-multiple"
}
}
}
}

View File

@@ -1,18 +1,61 @@
"""Xbox friends binary sensors."""
"""Sensor platform for the Xbox integration."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum
from functools import partial
from homeassistant.components.sensor import SensorEntity
from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from .coordinator import XboxConfigEntry, XboxUpdateCoordinator
from .coordinator import PresenceData, XboxConfigEntry, XboxUpdateCoordinator
from .entity import XboxBaseEntity
SENSOR_ATTRIBUTES = ["status", "gamer_score", "account_tier", "gold_tenure"]
class XboxSensor(StrEnum):
"""Xbox sensor."""
STATUS = "status"
GAMER_SCORE = "gamer_score"
ACCOUNT_TIER = "account_tier"
GOLD_TENURE = "gold_tenure"
@dataclass(kw_only=True, frozen=True)
class XboxSensorEntityDescription(SensorEntityDescription):
"""Xbox sensor description."""
value_fn: Callable[[PresenceData], StateType]
SENSOR_DESCRIPTIONS: tuple[XboxSensorEntityDescription, ...] = (
XboxSensorEntityDescription(
key=XboxSensor.STATUS,
translation_key=XboxSensor.STATUS,
value_fn=lambda x: x.status,
),
XboxSensorEntityDescription(
key=XboxSensor.GAMER_SCORE,
translation_key=XboxSensor.GAMER_SCORE,
value_fn=lambda x: x.gamer_score,
),
XboxSensorEntityDescription(
key=XboxSensor.ACCOUNT_TIER,
translation_key=XboxSensor.ACCOUNT_TIER,
entity_registry_enabled_default=False,
value_fn=lambda x: x.account_tier,
),
XboxSensorEntityDescription(
key=XboxSensor.GOLD_TENURE,
translation_key=XboxSensor.GOLD_TENURE,
entity_registry_enabled_default=False,
value_fn=lambda x: x.gold_tenure,
),
)
async def async_setup_entry(
@@ -32,13 +75,12 @@ async def async_setup_entry(
class XboxSensorEntity(XboxBaseEntity, SensorEntity):
"""Representation of a Xbox presence state."""
@property
def native_value(self):
"""Return the state of the requested attribute."""
if not self.coordinator.last_update_success:
return None
entity_description: XboxSensorEntityDescription
return getattr(self.data, self.attribute, None)
@property
def native_value(self) -> StateType:
"""Return the state of the requested attribute."""
return self.entity_description.value_fn(self.data)
@callback
@@ -55,29 +97,13 @@ def async_update_friends(
new_entities: list[XboxSensorEntity] = []
for xuid in new_ids - current_ids:
current[xuid] = [
XboxSensorEntity(coordinator, xuid, attribute)
for attribute in SENSOR_ATTRIBUTES
XboxSensorEntity(coordinator, xuid, description)
for description in SENSOR_DESCRIPTIONS
]
new_entities = new_entities + current[xuid]
async_add_entities(new_entities)
if new_entities:
async_add_entities(new_entities)
# Process deleted favorites, remove them from Home Assistant
for xuid in current_ids - new_ids:
coordinator.hass.async_create_task(
async_remove_entities(xuid, coordinator, current)
)
async def async_remove_entities(
xuid: str,
coordinator: XboxUpdateCoordinator,
current: dict[str, list[XboxSensorEntity]],
) -> None:
"""Remove friend sensors from Home Assistant."""
registry = er.async_get(coordinator.hass)
entities = current[xuid]
for entity in entities:
if entity.entity_id in registry.entities:
registry.async_remove(entity.entity_id)
del current[xuid]

View File

@@ -23,5 +23,41 @@
"create_entry": {
"default": "[%key:common::config_flow::create_entry::authenticated%]"
}
},
"entity": {
"sensor": {
"status": {
"name": "Status"
},
"gamer_score": {
"name": "Gamerscore",
"unit_of_measurement": "points"
},
"account_tier": {
"name": "Account tier"
},
"gold_tenure": {
"name": "Gold tenure"
}
},
"binary_sensor": {
"in_party": {
"name": "In party"
},
"in_game": {
"name": "In game"
},
"in_multiplayer": {
"name": "In multiplayer"
}
}
},
"exceptions": {
"request_exception": {
"message": "Failed to connect to Xbox Network: {error}"
},
"timeout_exception": {
"message": "Failed to connect to Xbox Network due to a connection timeout"
}
}
}

View File

@@ -15,6 +15,8 @@ import requests
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TARGET,
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA,
@@ -44,7 +46,6 @@ if sys.version_info < (3, 14):
_LOGGER = logging.getLogger(__name__)
ATTR_DATA = "data"
ATTR_PATH = "path"
ATTR_PATH_TEMPLATE = "path_template"
ATTR_TIMEOUT = "timeout"
@@ -112,13 +113,14 @@ class XmppNotificationService(BaseNotificationService):
"""Send a message to a user."""
title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
text = f"{title}: {message}" if title else message
targets = kwargs.get(ATTR_TARGET, self._recipients)
data = kwargs.get(ATTR_DATA)
timeout = data.get(ATTR_TIMEOUT, XEP_0363_TIMEOUT) if data else None
await async_send_message(
f"{self._sender}/{self._resource}",
self._password,
self._recipients,
targets,
self._tls,
self._verify,
self._room,
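A hedged usage sketch for the recipient override above (not from the diff): the standard notify `target` field now overrides the recipients configured for the platform, while the configured recipients remain the fallback when `target` is omitted. The service name and addresses are hypothetical, and the call assumes an async context with a `hass` reference.
# Hypothetical notify call; "jabber" stands in for whatever the XMPP notify service is named.
await hass.services.async_call(
    "notify",
    "jabber",
    {
        "message": "Backup finished",
        "target": ["alice@example.org", "bob@example.org"],  # per-message override
    },
    blocking=True,
)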

View File

@@ -190,12 +190,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
homekit_model_lookup, homekit_model_matchers = build_homekit_model_lookups(
homekit_models
)
local_service_info = await _async_get_local_service_info(hass)
discovery = ZeroconfDiscovery(
hass,
zeroconf,
zeroconf_types,
homekit_model_lookup,
homekit_model_matchers,
local_service_info,
)
await discovery.async_setup()
hass.data[DATA_DISCOVERY] = discovery
@@ -206,8 +208,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
Wait till started or otherwise HTTP is not up and running.
"""
uuid = await instance_id.async_get(hass)
await _async_register_hass_zc_service(hass, aio_zc, uuid)
await _async_register_hass_zc_service(aio_zc, local_service_info)
async def _async_zeroconf_hass_stop(_event: Event) -> None:
await discovery.async_stop()
@@ -227,48 +228,12 @@ def _filter_disallowed_characters(name: str) -> str:
async def _async_register_hass_zc_service(
hass: HomeAssistant, aio_zc: HaAsyncZeroconf, uuid: str
aio_zc: HaAsyncZeroconf, local_service_info: AsyncServiceInfo
) -> None:
# Get instance UUID
valid_location_name = _truncate_location_name_to_valid(
_filter_disallowed_characters(hass.config.location_name or "Home")
)
params = {
"location_name": valid_location_name,
"uuid": uuid,
"version": __version__,
"external_url": "",
"internal_url": "",
# Old base URL, for backward compatibility
"base_url": "",
# Always needs authentication
"requires_api_password": True,
}
# Get instance URLs
with suppress(NoURLAvailableError):
params["external_url"] = get_url(hass, allow_internal=False)
with suppress(NoURLAvailableError):
params["internal_url"] = get_url(hass, allow_external=False)
# Set old base URL based on external or internal
params["base_url"] = params["external_url"] or params["internal_url"]
_suppress_invalid_properties(params)
info = AsyncServiceInfo(
ZEROCONF_TYPE,
name=f"{valid_location_name}.{ZEROCONF_TYPE}",
server=f"{uuid}.local.",
parsed_addresses=await network.async_get_announce_addresses(hass),
port=hass.http.server_port,
properties=params,
)
"""Register the zeroconf service for the local Home Assistant instance."""
_LOGGER.info("Starting Zeroconf broadcast")
await aio_zc.async_register_service(info, allow_name_change=True)
await aio_zc.async_register_service(local_service_info, allow_name_change=True)
def _suppress_invalid_properties(properties: dict) -> None:
@@ -307,6 +272,47 @@ def _truncate_location_name_to_valid(location_name: str) -> str:
return location_name.encode("utf-8")[:MAX_NAME_LEN].decode("utf-8", "ignore")
async def _async_get_local_service_info(hass: HomeAssistant) -> AsyncServiceInfo:
"""Return the zeroconf service info for the local Home Assistant instance."""
valid_location_name = _truncate_location_name_to_valid(
_filter_disallowed_characters(hass.config.location_name or "Home")
)
uuid = await instance_id.async_get(hass)
params = {
"location_name": valid_location_name,
"uuid": uuid,
"version": __version__,
"external_url": "",
"internal_url": "",
# Old base URL, for backward compatibility
"base_url": "",
# Always needs authentication
"requires_api_password": True,
}
# Get instance URLs
with suppress(NoURLAvailableError):
params["external_url"] = get_url(hass, allow_internal=False)
with suppress(NoURLAvailableError):
params["internal_url"] = get_url(hass, allow_external=False)
# Set old base URL based on external or internal
params["base_url"] = params["external_url"] or params["internal_url"]
_suppress_invalid_properties(params)
return AsyncServiceInfo(
ZEROCONF_TYPE,
name=f"{valid_location_name}.{ZEROCONF_TYPE}",
server=f"{uuid}.local.",
parsed_addresses=await network.async_get_announce_addresses(hass),
port=hass.http.server_port,
properties=params,
)
# These can be removed if no deprecated constant are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(

View File

@@ -19,6 +19,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import discovery_flow
from homeassistant.helpers.discovery_flow import DiscoveryKey
from homeassistant.helpers.dispatcher import async_dispatcher_connect
import homeassistant.helpers.issue_registry as ir
from homeassistant.helpers.service_info.zeroconf import (
ZeroconfServiceInfo as _ZeroconfServiceInfo,
)
@@ -49,6 +50,8 @@ ATTR_DOMAIN: Final = "domain"
ATTR_NAME: Final = "name"
ATTR_PROPERTIES: Final = "properties"
DUPLICATE_INSTANCE_ID_ISSUE_ID = "duplicate_instance_id"
DATA_DISCOVERY: HassKey[ZeroconfDiscovery] = HassKey("zeroconf_discovery")
@@ -183,6 +186,7 @@ class ZeroconfDiscovery:
zeroconf_types: dict[str, list[ZeroconfMatcher]],
homekit_model_lookups: dict[str, HomeKitDiscoveredIntegration],
homekit_model_matchers: dict[re.Pattern, HomeKitDiscoveredIntegration],
local_service_info: AsyncServiceInfo,
) -> None:
"""Init discovery."""
self.hass = hass
@@ -193,6 +197,11 @@ class ZeroconfDiscovery:
self.async_service_browser: AsyncServiceBrowser | None = None
self._service_update_listeners: set[Callable[[AsyncServiceInfo], None]] = set()
self._service_removed_listeners: set[Callable[[str], None]] = set()
self._conflicting_instances: set[str] = set()
self._local_service_info = info_from_service(local_service_info)
self._local_ips: set[IPv4Address | IPv6Address] = set()
if self._local_service_info:
self._local_ips = set(self._local_service_info.ip_addresses)
@callback
def async_register_service_update_listener(
@@ -278,6 +287,16 @@ class ZeroconfDiscovery:
)
if state_change is ServiceStateChange.Removed:
# Check if another Home Assistant instance has been removed.
# Then we can remove the duplicate instance ID issue,
# as the conflicting instance has probably been shut down.
if service_type == ZEROCONF_TYPE and name in self._conflicting_instances:
self._conflicting_instances.remove(name)
if len(self._conflicting_instances) == 0:
ir.async_delete_issue(
self.hass, DOMAIN, DUPLICATE_INSTANCE_ID_ISSUE_ID
)
self._async_dismiss_discoveries(name)
for listener in self._service_removed_listeners:
listener(name)
@@ -336,6 +355,13 @@ class ZeroconfDiscovery:
return
_LOGGER.debug("Discovered new device %s %s", name, info)
props: dict[str, str | None] = info.properties
# Instance ID conflict detection for Home Assistant core
if service_type == ZEROCONF_TYPE and (
discovered_instance_id := props.get("uuid")
):
self._async_check_instance_id_conflict(discovered_instance_id, info)
discovery_key = DiscoveryKey(
domain=DOMAIN,
key=(info.type, info.name),
@@ -408,3 +434,59 @@ class ZeroconfDiscovery:
info,
discovery_key=discovery_key,
)
@callback
def _async_check_instance_id_conflict(
self, discovered_instance_id: str, info: _ZeroconfServiceInfo
) -> None:
"""Check for instance ID conflicts and create repair issues if needed."""
if not self._local_service_info:
_LOGGER.debug(
"No local service info, cannot check for instance ID conflicts"
)
return
discovered_ips = set(info.ip_addresses)
is_disjoint = self._local_ips.isdisjoint(discovered_ips)
local_instance_id = self._local_service_info.properties.get("uuid")
if not is_disjoint:
# No conflict, IP addresses of service contain a local IP
# Ignore it as it's probably a mDNS reflection
return
if discovered_instance_id != local_instance_id:
# No conflict, different instance IDs
# If there was a conflict issue before, we remove it
# since the other instance may have changed its ID
if info.name in self._conflicting_instances:
self._conflicting_instances.remove(info.name)
if len(self._conflicting_instances) == 0:
ir.async_delete_issue(self.hass, DOMAIN, DUPLICATE_INSTANCE_ID_ISSUE_ID)
return
# Conflict detected, create repair issue
_joined_ips = ", ".join(str(ip_address) for ip_address in discovered_ips)
_LOGGER.warning(
"Discovered another Home Assistant instance with the same instance ID (%s) at %s",
discovered_instance_id,
_joined_ips,
)
self._conflicting_instances.add(info.name)
ir.async_create_issue(
self.hass,
DOMAIN,
DUPLICATE_INSTANCE_ID_ISSUE_ID,
is_fixable=True,
is_persistent=False,
severity=ir.IssueSeverity.ERROR,
translation_key=DUPLICATE_INSTANCE_ID_ISSUE_ID,
translation_placeholders={
"instance_id": local_instance_id,
"other_ip": _joined_ips,
"other_host_url": info.hostname.rstrip("."),
},
)
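A minimal sketch of the decision implemented above, with made-up addresses: a repair issue is raised only when the advertised instance ID equals the local one while the advertised IP addresses share nothing with the local ones; any shared address is treated as the instance's own record reflected back by the network.
from ipaddress import ip_address
local_ips = {ip_address("192.168.1.10")}  # addresses announced by this instance
discovered_ips = {ip_address("192.168.1.55")}  # hypothetical remote announcement
same_instance_id = True  # the discovered uuid equals the local uuid
if same_instance_id and local_ips.isdisjoint(discovered_ips):
    print("duplicate instance ID -> create repair issue")
else:
    print("own record reflected back, or a different instance -> ignore")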

View File

@@ -0,0 +1,60 @@
"""Repairs for the zeroconf integration."""
from __future__ import annotations
from homeassistant import data_entry_flow
from homeassistant.components.homeassistant import (
DOMAIN as DOMAIN_HOMEASSISTANT,
SERVICE_HOMEASSISTANT_RESTART,
)
from homeassistant.components.repairs import RepairsFlow
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import instance_id, issue_registry as ir
class DuplicateInstanceIDRepairFlow(RepairsFlow):
"""Handler for duplicate instance ID repair."""
@callback
def _async_get_placeholders(self) -> dict[str, str]:
issue_registry = ir.async_get(self.hass)
issue = issue_registry.async_get_issue(self.handler, self.issue_id)
assert issue is not None
return issue.translation_placeholders or {}
async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
"""Handle the initial step."""
return await self.async_step_confirm_recreate()
async def async_step_confirm_recreate(
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
"""Handle the confirm step."""
if user_input is not None:
await instance_id.async_recreate(self.hass)
await self.hass.services.async_call(
DOMAIN_HOMEASSISTANT, SERVICE_HOMEASSISTANT_RESTART
)
return self.async_create_entry(title="", data={})
return self.async_show_form(
step_id="confirm_recreate",
description_placeholders=self._async_get_placeholders(),
)
async def async_create_fix_flow(
hass: HomeAssistant,
issue_id: str,
data: dict[str, str | int | float | None] | None,
) -> RepairsFlow:
"""Create flow."""
if issue_id == "duplicate_instance_id":
return DuplicateInstanceIDRepairFlow()
# If Zeroconf adds confirm-only repairs in the future, this should be changed
# to return a ConfirmRepairFlow instead of raising a ValueError
raise ValueError(f"unknown repair {issue_id}")

View File

@@ -0,0 +1,14 @@
{
"issues": {
"duplicate_instance_id": {
"title": "Duplicate Home Assistant instance detected on your network",
"fix_flow": {
"step": {
"confirm_recreate": {
"description": "Another device ({other_ip}) on your network is advertising the same Home Assistant instance ID ({instance_id} reachable via {other_host_url}) as this instance. This can cause network instability and excessive traffic.\n\nTo fix this issue:\n1. Change the instance ID on **only one** of the Home Assistant instances.\n2. Once the conflict is resolved, the repair issue on the other instance will disappear automatically.\n\nAfter confirming, a new instance ID will be generated for this Home Assistant instance and the instance will restart. This will not affect your configuration or data, but it may take a few minutes for other devices on your network to recognize the change.\n\nTo proceed, click 'Submit' below."
}
}
}
}
}
}

View File

@@ -47,3 +47,14 @@ async def async_get(hass: HomeAssistant) -> str:
await store.async_save(data)
return data["uuid"]
async def async_recreate(hass: HomeAssistant) -> str:
"""Recreate a new unique ID for the hass instance."""
store = storage.Store[dict[str, str]](hass, DATA_VERSION, DATA_KEY, True)
data = {"uuid": uuid.uuid4().hex}
await store.async_save(data)
return data["uuid"]

View File

@@ -7,6 +7,7 @@ import asyncio
from collections import OrderedDict
from collections.abc import Callable, Mapping, Sequence
from glob import glob
import json
import logging
import os
from typing import Any
@@ -82,17 +83,60 @@ def run(script_args: list) -> int:
parser.add_argument(
"-s", "--secrets", action="store_true", help="Show secret information"
)
parser.add_argument("--json", action="store_true", help="Output JSON format")
parser.add_argument(
"--fail-on-warnings",
action="store_true",
help="Exit non-zero if warnings are present",
)
args, unknown = parser.parse_known_args()
args, unknown = parser.parse_known_args(script_args)
if unknown:
print(color("red", "Unknown arguments:", ", ".join(unknown)))
config_dir = os.path.join(os.getcwd(), args.config)
print(color("bold", "Testing configuration at", config_dir))
if not args.json:
print(color("bold", "Testing configuration at", config_dir))
res = check(config_dir, args.secrets)
# JSON output branch
if args.json:
json_object = {
"config_dir": config_dir,
"total_errors": sum(len(errors) for errors in res["except"].values()),
"total_warnings": sum(len(warnings) for warnings in res["warn"].values()),
"errors": res["except"],
"warnings": res["warn"],
"components": list(res["components"].keys()),
}
# Include secrets information if requested
if args.secrets:
# Build list of missing secrets (referenced but not found)
missing_secrets = [
key for key, val in res["secrets"].items() if val is None
]
# Build list of used secrets (found and used)
used_secrets = [
key for key, val in res["secrets"].items() if val is not None
]
json_object["secrets"] = {
"secret_files": res["secret_cache"],
"used_secrets": used_secrets,
"missing_secrets": missing_secrets,
"total_secrets": len(res["secrets"]),
"total_missing": len(missing_secrets),
}
print(json.dumps(json_object, indent=2))
# Determine exit code for JSON mode
return 1 if res["except"] or (args.fail_on_warnings and res["warn"]) else 0
domain_info: list[str] = []
if args.info:
domain_info = args.info.split(",")
@@ -165,7 +209,8 @@ def run(script_args: list) -> int:
continue
print(" -", skey + ":", sval)
return len(res["except"])
# Determine final exit code
return 1 if res["except"] or (args.fail_on_warnings and res["warn"]) else 0
def check(config_dir, secrets=False):

requirements_all.txt generated
View File

@@ -213,7 +213,7 @@ aioaseko==1.0.0
aioasuswrt==1.5.1
# homeassistant.components.husqvarna_automower
aioautomower==2.2.1
aioautomower==2.3.1
# homeassistant.components.azure_devops
aioazuredevops==2.2.2
@@ -1945,7 +1945,7 @@ pycsspeechtts==1.0.8
# pycups==2.0.4
# homeassistant.components.cync
pycync==0.4.1
pycync==0.4.2
# homeassistant.components.daikin
pydaikin==2.17.1

View File

@@ -201,7 +201,7 @@ aioaseko==1.0.0
aioasuswrt==1.5.1
# homeassistant.components.husqvarna_automower
aioautomower==2.2.1
aioautomower==2.3.1
# homeassistant.components.azure_devops
aioazuredevops==2.2.2
@@ -1638,7 +1638,7 @@ pycsspeechtts==1.0.8
# pycups==2.0.4
# homeassistant.components.cync
pycync==0.4.1
pycync==0.4.2
# homeassistant.components.daikin
pydaikin==2.17.1

View File

@@ -1,9 +1,11 @@
"""Common fixtures for the Bluesound tests."""
from __future__ import annotations
from collections.abc import AsyncGenerator, Generator
from dataclasses import dataclass
import ipaddress
from typing import Any
from typing import Any, Self
from unittest.mock import AsyncMock, patch
from pyblu import Input, Player, Preset, Status, SyncStatus
@@ -27,8 +29,8 @@ class PlayerMockData:
status_long_polling_mock: LongPollingMock[Status]
sync_status_long_polling_mock: LongPollingMock[SyncStatus]
@staticmethod
async def generate(host: str) -> "PlayerMockData":
@classmethod
async def generate(cls, host: str) -> Self:
"""Generate player mock data."""
host_ip = ipaddress.ip_address(host)
assert host_ip.version == 4
@@ -110,7 +112,7 @@ class PlayerMockData:
]
)
return PlayerMockData(
return cls(
host, player, status_long_polling_mock, sync_status_long_polling_mock
)

View File

@@ -1,4 +1,4 @@
"""Test the Switch config flow."""
"""Test the Group config flow."""
from typing import Any
from unittest.mock import patch
@@ -60,6 +60,7 @@ from tests.typing import WebSocketGenerator
),
("switch", "on", "on", {}, {}, {"all": False}, {}),
("switch", "on", "on", {}, {"all": True}, {"all": True}, {}),
("valve", "open", "open", {}, {}, {}, {}),
],
)
async def test_config_flow(
@@ -148,6 +149,7 @@ async def test_config_flow(
("notify", {}),
("media_player", {}),
("switch", {}),
("valve", {}),
],
)
async def test_config_flow_hides_members(
@@ -222,6 +224,7 @@ async def test_config_flow_hides_members(
{"ignore_non_numeric": False, "type": "sum"},
),
("switch", "on", {"all": False}, {}),
("valve", "open", {}, {}),
],
)
async def test_options(
@@ -404,6 +407,7 @@ async def test_all_options(
("notify", {}),
("media_player", {}),
("switch", {}),
("valve", {}),
],
)
async def test_options_flow_hides_members(
@@ -487,6 +491,7 @@ LOCK_ATTRS = [{"supported_features": 1}, {}]
NOTIFY_ATTRS = [{"supported_features": 0}, {}]
MEDIA_PLAYER_ATTRS = [{"supported_features": 0}, {}]
SENSOR_ATTRS = [{"icon": "mdi:calculator"}, {"max_entity_id": "sensor.input_two"}]
VALVE_ATTRS = [{"supported_features": 0}, {}]
@pytest.mark.parametrize(
@@ -503,6 +508,7 @@ SENSOR_ATTRS = [{"icon": "mdi:calculator"}, {"max_entity_id": "sensor.input_two"
("media_player", {}, ["on", "off"], "on", MEDIA_PLAYER_ATTRS),
("sensor", {"type": "max"}, ["10", "20"], "20.0", SENSOR_ATTRS),
("switch", {}, ["on", "off"], "on", [{}, {}]),
("valve", {}, ["open", "closed"], "open", VALVE_ATTRS),
],
)
async def test_config_flow_preview(
@@ -621,6 +627,7 @@ async def test_config_flow_preview(
SENSOR_ATTRS,
),
("switch", {}, {}, ["on", "off"], "on", [{}, {}]),
("valve", {}, {}, ["open", "closed"], "open", VALVE_ATTRS),
],
)
async def test_option_flow_preview(

View File

@@ -0,0 +1,688 @@
"""The tests for the group valve platform."""
import asyncio
from datetime import timedelta
from typing import Any
from unittest.mock import patch
import pytest
from homeassistant.components.group.valve import DEFAULT_NAME
from homeassistant.components.valve import (
ATTR_CURRENT_POSITION,
ATTR_POSITION,
DOMAIN as VALVE_DOMAIN,
ValveState,
)
from homeassistant.const import (
ATTR_ASSUMED_STATE,
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_SUPPORTED_FEATURES,
CONF_ENTITIES,
CONF_UNIQUE_ID,
SERVICE_CLOSE_VALVE,
SERVICE_OPEN_VALVE,
SERVICE_SET_VALVE_POSITION,
SERVICE_STOP_VALVE,
SERVICE_TOGGLE,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
from tests.common import assert_setup_component, async_fire_time_changed
VALVE_GROUP = "valve.valve_group"
DEMO_VALVE1 = "valve.front_garden"
DEMO_VALVE2 = "valve.orchard"
DEMO_VALVE_POS1 = "valve.back_garden"
DEMO_VALVE_POS2 = "valve.trees"
CONFIG_ALL = {
VALVE_DOMAIN: [
{"platform": "demo"},
{
"platform": "group",
CONF_ENTITIES: [DEMO_VALVE1, DEMO_VALVE2, DEMO_VALVE_POS1, DEMO_VALVE_POS2],
},
]
}
CONFIG_POS = {
VALVE_DOMAIN: [
{"platform": "demo"},
{
"platform": "group",
CONF_ENTITIES: [DEMO_VALVE_POS1, DEMO_VALVE_POS2],
},
]
}
CONFIG_ATTRIBUTES = {
VALVE_DOMAIN: {
"platform": "group",
CONF_ENTITIES: [DEMO_VALVE1, DEMO_VALVE2, DEMO_VALVE_POS1, DEMO_VALVE_POS2],
CONF_UNIQUE_ID: "unique_identifier",
}
}
@pytest.fixture(scope="module", autouse=True)
def patch_demo_open_close_delay():
"""Patch demo valve open/close delay."""
with patch("homeassistant.components.demo.valve.OPEN_CLOSE_DELAY", 0):
yield
@pytest.fixture
async def setup_comp(
hass: HomeAssistant, config_count: tuple[dict[str, Any], int]
) -> None:
"""Set up group valve component."""
config, count = config_count
with assert_setup_component(count, VALVE_DOMAIN):
await async_setup_component(hass, VALVE_DOMAIN, config)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
@pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)])
@pytest.mark.usefixtures("setup_comp")
async def test_state(hass: HomeAssistant) -> None:
"""Test handling of state.
The group state is unknown if all group members are unknown or unavailable.
Otherwise, the group state is opening if at least one group member is opening.
Otherwise, the group state is closing if at least one group member is closing.
Otherwise, the group state is open if at least one group member is open.
Otherwise, the group state is closed.
"""
state = hass.states.get(VALVE_GROUP)
# No entity has a valid state -> group state unavailable
assert state.state == STATE_UNAVAILABLE
assert state.attributes[ATTR_FRIENDLY_NAME] == DEFAULT_NAME
assert ATTR_ENTITY_ID not in state.attributes
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0
assert ATTR_CURRENT_POSITION not in state.attributes
# Test group members exposed as attribute
hass.states.async_set(DEMO_VALVE1, STATE_UNKNOWN, {})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.attributes[ATTR_ENTITY_ID] == [
DEMO_VALVE1,
DEMO_VALVE2,
DEMO_VALVE_POS1,
DEMO_VALVE_POS2,
]
# The group state is unavailable if all group members are unavailable.
hass.states.async_set(DEMO_VALVE1, STATE_UNAVAILABLE, {})
hass.states.async_set(DEMO_VALVE_POS1, STATE_UNAVAILABLE, {})
hass.states.async_set(DEMO_VALVE_POS2, STATE_UNAVAILABLE, {})
hass.states.async_set(DEMO_VALVE2, STATE_UNAVAILABLE, {})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == STATE_UNAVAILABLE
# The group state is unknown if all group members are unknown or unavailable.
for state_1 in (STATE_UNAVAILABLE, STATE_UNKNOWN):
for state_2 in (STATE_UNAVAILABLE, STATE_UNKNOWN):
for state_3 in (STATE_UNAVAILABLE, STATE_UNKNOWN):
hass.states.async_set(DEMO_VALVE1, state_1, {})
hass.states.async_set(DEMO_VALVE_POS1, state_2, {})
hass.states.async_set(DEMO_VALVE_POS2, state_3, {})
hass.states.async_set(DEMO_VALVE2, STATE_UNKNOWN, {})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == STATE_UNKNOWN
# At least one member opening -> group opening
for state_1 in (
ValveState.CLOSED,
ValveState.CLOSING,
ValveState.OPEN,
ValveState.OPENING,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
for state_2 in (
ValveState.CLOSED,
ValveState.CLOSING,
ValveState.OPEN,
ValveState.OPENING,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
for state_3 in (
ValveState.CLOSED,
ValveState.CLOSING,
ValveState.OPEN,
ValveState.OPENING,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
hass.states.async_set(DEMO_VALVE1, state_1, {})
hass.states.async_set(DEMO_VALVE_POS1, state_2, {})
hass.states.async_set(DEMO_VALVE_POS2, state_3, {})
hass.states.async_set(DEMO_VALVE2, ValveState.OPENING, {})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.OPENING
# At least one member closing -> group closing
for state_1 in (
ValveState.CLOSED,
ValveState.CLOSING,
ValveState.OPEN,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
for state_2 in (
ValveState.CLOSED,
ValveState.CLOSING,
ValveState.OPEN,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
for state_3 in (
ValveState.CLOSED,
ValveState.CLOSING,
ValveState.OPEN,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
hass.states.async_set(DEMO_VALVE1, state_1, {})
hass.states.async_set(DEMO_VALVE_POS1, state_2, {})
hass.states.async_set(DEMO_VALVE_POS2, state_3, {})
hass.states.async_set(DEMO_VALVE2, ValveState.CLOSING, {})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.CLOSING
# At least one member open -> group open
for state_1 in (
ValveState.CLOSED,
ValveState.OPEN,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
for state_2 in (
ValveState.CLOSED,
ValveState.OPEN,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
for state_3 in (
ValveState.CLOSED,
ValveState.OPEN,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
):
hass.states.async_set(DEMO_VALVE1, state_1, {})
hass.states.async_set(DEMO_VALVE_POS1, state_2, {})
hass.states.async_set(DEMO_VALVE_POS2, state_3, {})
hass.states.async_set(DEMO_VALVE2, ValveState.OPEN, {})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.OPEN
# At least one member closed -> group closed
for state_1 in (ValveState.CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN):
for state_2 in (ValveState.CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN):
for state_3 in (ValveState.CLOSED, STATE_UNAVAILABLE, STATE_UNKNOWN):
hass.states.async_set(DEMO_VALVE1, state_1, {})
hass.states.async_set(DEMO_VALVE_POS1, state_2, {})
hass.states.async_set(DEMO_VALVE_POS2, state_3, {})
hass.states.async_set(DEMO_VALVE2, ValveState.CLOSED, {})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.CLOSED
# All group members removed from the state machine -> unavailable
hass.states.async_remove(DEMO_VALVE1)
hass.states.async_remove(DEMO_VALVE_POS1)
hass.states.async_remove(DEMO_VALVE_POS2)
hass.states.async_remove(DEMO_VALVE2)
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == STATE_UNAVAILABLE
@pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)])
@pytest.mark.usefixtures("setup_comp")
async def test_attributes(
hass: HomeAssistant, entity_registry: er.EntityRegistry
) -> None:
"""Test handling of state attributes."""
state = hass.states.get(VALVE_GROUP)
assert state.state == STATE_UNAVAILABLE
assert state.attributes[ATTR_FRIENDLY_NAME] == DEFAULT_NAME
assert ATTR_ENTITY_ID not in state.attributes
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0
assert ATTR_CURRENT_POSITION not in state.attributes
# Set entity as closed
hass.states.async_set(DEMO_VALVE1, ValveState.CLOSED, {})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.CLOSED
assert state.attributes[ATTR_ENTITY_ID] == [
DEMO_VALVE1,
DEMO_VALVE2,
DEMO_VALVE_POS1,
DEMO_VALVE_POS2,
]
# Set entity as opening
hass.states.async_set(DEMO_VALVE1, ValveState.OPENING, {})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.OPENING
# Set entity as closing
hass.states.async_set(DEMO_VALVE1, ValveState.CLOSING, {})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.CLOSING
# Set entity as unknown again
hass.states.async_set(DEMO_VALVE1, STATE_UNKNOWN, {})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == STATE_UNKNOWN
# Add Entity that supports open / close / stop
hass.states.async_set(DEMO_VALVE1, ValveState.OPEN, {ATTR_SUPPORTED_FEATURES: 11})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.OPEN
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 11
assert ATTR_CURRENT_POSITION not in state.attributes
# Add Entity that supports set_valve_position
hass.states.async_set(
DEMO_VALVE_POS1,
ValveState.OPEN,
{ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 70},
)
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.OPEN
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 15
assert state.attributes[ATTR_CURRENT_POSITION] == 70
### Test state when group members have different states ###
# Valves
hass.states.async_remove(DEMO_VALVE_POS1)
hass.states.async_remove(DEMO_VALVE_POS2)
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.OPEN
assert ATTR_ASSUMED_STATE not in state.attributes
assert state.attributes[ATTR_SUPPORTED_FEATURES] == 11
assert ATTR_CURRENT_POSITION not in state.attributes
# Test entity registry integration
entry = entity_registry.async_get(VALVE_GROUP)
assert entry
assert entry.unique_id == "unique_identifier"
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
@pytest.mark.usefixtures("setup_comp")
async def test_open_valves(hass: HomeAssistant) -> None:
"""Test open valve function."""
await hass.services.async_call(
VALVE_DOMAIN, SERVICE_OPEN_VALVE, {ATTR_ENTITY_ID: VALVE_GROUP}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.OPEN
assert state.attributes[ATTR_CURRENT_POSITION] == 100
assert hass.states.get(DEMO_VALVE1).state == ValveState.OPEN
assert hass.states.get(DEMO_VALVE_POS1).attributes[ATTR_CURRENT_POSITION] == 100
assert hass.states.get(DEMO_VALVE_POS2).attributes[ATTR_CURRENT_POSITION] == 100
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
@pytest.mark.usefixtures("setup_comp")
async def test_close_valves(hass: HomeAssistant) -> None:
"""Test close valve function."""
await hass.services.async_call(
VALVE_DOMAIN, SERVICE_CLOSE_VALVE, {ATTR_ENTITY_ID: VALVE_GROUP}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.CLOSED
assert state.attributes[ATTR_CURRENT_POSITION] == 0
assert hass.states.get(DEMO_VALVE1).state == ValveState.CLOSED
assert hass.states.get(DEMO_VALVE_POS1).attributes[ATTR_CURRENT_POSITION] == 0
assert hass.states.get(DEMO_VALVE_POS2).attributes[ATTR_CURRENT_POSITION] == 0
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
@pytest.mark.usefixtures("setup_comp")
async def test_toggle_valves(hass: HomeAssistant) -> None:
"""Test toggle valve function."""
# Start valves in open state
await hass.services.async_call(
VALVE_DOMAIN, SERVICE_OPEN_VALVE, {ATTR_ENTITY_ID: VALVE_GROUP}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.OPEN
# Toggle will close valves
await hass.services.async_call(
VALVE_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: VALVE_GROUP}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.CLOSED
assert state.attributes[ATTR_CURRENT_POSITION] == 0
assert hass.states.get(DEMO_VALVE1).state == ValveState.CLOSED
assert hass.states.get(DEMO_VALVE_POS1).attributes[ATTR_CURRENT_POSITION] == 0
assert hass.states.get(DEMO_VALVE_POS2).attributes[ATTR_CURRENT_POSITION] == 0
# Toggle again will open valves
await hass.services.async_call(
VALVE_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: VALVE_GROUP}, blocking=True
)
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.OPEN
assert state.attributes[ATTR_CURRENT_POSITION] == 100
assert hass.states.get(DEMO_VALVE1).state == ValveState.OPEN
assert hass.states.get(DEMO_VALVE_POS1).attributes[ATTR_CURRENT_POSITION] == 100
assert hass.states.get(DEMO_VALVE_POS2).attributes[ATTR_CURRENT_POSITION] == 100
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
@pytest.mark.usefixtures("setup_comp")
async def test_stop_valves(hass: HomeAssistant) -> None:
"""Test stop valve function."""
await hass.services.async_call(
VALVE_DOMAIN, SERVICE_OPEN_VALVE, {ATTR_ENTITY_ID: VALVE_GROUP}, blocking=True
)
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.OPENING
await hass.services.async_call(
VALVE_DOMAIN, SERVICE_STOP_VALVE, {ATTR_ENTITY_ID: VALVE_GROUP}, blocking=True
)
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.OPEN
assert state.attributes[ATTR_CURRENT_POSITION] == 60 # (40 + 80) / 2
assert hass.states.get(DEMO_VALVE1).state == ValveState.OPEN
assert hass.states.get(DEMO_VALVE_POS1).attributes[ATTR_CURRENT_POSITION] == 80
assert hass.states.get(DEMO_VALVE_POS2).attributes[ATTR_CURRENT_POSITION] == 40
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
@pytest.mark.usefixtures("setup_comp")
async def test_set_valve_position(hass: HomeAssistant) -> None:
"""Test set valve position function."""
await hass.services.async_call(
VALVE_DOMAIN,
SERVICE_SET_VALVE_POSITION,
{ATTR_ENTITY_ID: VALVE_GROUP, ATTR_POSITION: 50},
blocking=True,
)
for _ in range(4):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.state == ValveState.OPEN
assert state.attributes[ATTR_CURRENT_POSITION] == 50
assert hass.states.get(DEMO_VALVE1).state == ValveState.OPEN
assert hass.states.get(DEMO_VALVE_POS1).attributes[ATTR_CURRENT_POSITION] == 50
assert hass.states.get(DEMO_VALVE_POS2).attributes[ATTR_CURRENT_POSITION] == 50
@pytest.mark.parametrize("config_count", [(CONFIG_POS, 2)])
@pytest.mark.usefixtures("setup_comp")
async def test_is_opening_closing(hass: HomeAssistant) -> None:
"""Test is_opening property."""
await hass.services.async_call(
VALVE_DOMAIN, SERVICE_OPEN_VALVE, {ATTR_ENTITY_ID: VALVE_GROUP}, blocking=True
)
await hass.async_block_till_done()
# Both valves opening -> opening
assert hass.states.get(DEMO_VALVE_POS1).state == ValveState.OPENING
assert hass.states.get(DEMO_VALVE_POS2).state == ValveState.OPENING
assert hass.states.get(VALVE_GROUP).state == ValveState.OPENING
for _ in range(10):
future = dt_util.utcnow() + timedelta(seconds=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
await hass.services.async_call(
VALVE_DOMAIN, SERVICE_CLOSE_VALVE, {ATTR_ENTITY_ID: VALVE_GROUP}, blocking=True
)
# Both valves closing -> closing
assert hass.states.get(DEMO_VALVE_POS1).state == ValveState.CLOSING
assert hass.states.get(DEMO_VALVE_POS2).state == ValveState.CLOSING
assert hass.states.get(VALVE_GROUP).state == ValveState.CLOSING
hass.states.async_set(
DEMO_VALVE_POS1, ValveState.OPENING, {ATTR_SUPPORTED_FEATURES: 11}
)
await hass.async_block_till_done()
# Closing + Opening -> Opening
assert hass.states.get(DEMO_VALVE_POS2).state == ValveState.CLOSING
assert hass.states.get(DEMO_VALVE_POS1).state == ValveState.OPENING
assert hass.states.get(VALVE_GROUP).state == ValveState.OPENING
hass.states.async_set(
DEMO_VALVE_POS1, ValveState.CLOSING, {ATTR_SUPPORTED_FEATURES: 11}
)
await hass.async_block_till_done()
# Both valves closing -> closing
assert hass.states.get(DEMO_VALVE_POS2).state == ValveState.CLOSING
assert hass.states.get(DEMO_VALVE_POS1).state == ValveState.CLOSING
assert hass.states.get(VALVE_GROUP).state == ValveState.CLOSING
# Closed + Closing -> Closing
hass.states.async_set(
DEMO_VALVE_POS1, ValveState.CLOSED, {ATTR_SUPPORTED_FEATURES: 11}
)
await hass.async_block_till_done()
assert hass.states.get(DEMO_VALVE_POS2).state == ValveState.CLOSING
assert hass.states.get(DEMO_VALVE_POS1).state == ValveState.CLOSED
assert hass.states.get(VALVE_GROUP).state == ValveState.CLOSING
# Open + Closing -> Closing
hass.states.async_set(
DEMO_VALVE_POS1, ValveState.OPEN, {ATTR_SUPPORTED_FEATURES: 11}
)
await hass.async_block_till_done()
assert hass.states.get(DEMO_VALVE_POS2).state == ValveState.CLOSING
assert hass.states.get(DEMO_VALVE_POS1).state == ValveState.OPEN
assert hass.states.get(VALVE_GROUP).state == ValveState.CLOSING
# Closed + Opening -> Opening
hass.states.async_set(
DEMO_VALVE_POS2, ValveState.OPENING, {ATTR_SUPPORTED_FEATURES: 11}
)
hass.states.async_set(
DEMO_VALVE_POS1, ValveState.CLOSED, {ATTR_SUPPORTED_FEATURES: 11}
)
await hass.async_block_till_done()
assert hass.states.get(DEMO_VALVE_POS2).state == ValveState.OPENING
assert hass.states.get(DEMO_VALVE_POS1).state == ValveState.CLOSED
assert hass.states.get(VALVE_GROUP).state == ValveState.OPENING
# Open + Opening -> Opening
hass.states.async_set(
DEMO_VALVE_POS1, ValveState.OPEN, {ATTR_SUPPORTED_FEATURES: 11}
)
await hass.async_block_till_done()
assert hass.states.get(DEMO_VALVE_POS2).state == ValveState.OPENING
assert hass.states.get(DEMO_VALVE_POS1).state == ValveState.OPEN
assert hass.states.get(VALVE_GROUP).state == ValveState.OPENING
@pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)])
@pytest.mark.usefixtures("setup_comp")
async def test_assumed_state(hass: HomeAssistant) -> None:
"""Test assumed_state attribute behavior."""
# No members with assumed_state -> group doesn't have assumed_state in attributes
hass.states.async_set(DEMO_VALVE1, ValveState.OPEN, {})
hass.states.async_set(DEMO_VALVE_POS1, ValveState.OPEN, {})
hass.states.async_set(DEMO_VALVE_POS2, ValveState.CLOSED, {})
hass.states.async_set(DEMO_VALVE2, ValveState.CLOSED, {})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert ATTR_ASSUMED_STATE not in state.attributes
# One member with assumed_state=True -> group has assumed_state=True
hass.states.async_set(DEMO_VALVE1, ValveState.OPEN, {ATTR_ASSUMED_STATE: True})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.attributes.get(ATTR_ASSUMED_STATE) is True
# Multiple members with assumed_state=True -> group has assumed_state=True
hass.states.async_set(
DEMO_VALVE_POS2, ValveState.CLOSED, {ATTR_ASSUMED_STATE: True}
)
hass.states.async_set(DEMO_VALVE2, ValveState.CLOSED, {ATTR_ASSUMED_STATE: True})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.attributes.get(ATTR_ASSUMED_STATE) is True
# Unavailable member with assumed_state=True -> group has assumed_state=True
hass.states.async_set(DEMO_VALVE1, ValveState.OPEN, {})
hass.states.async_set(DEMO_VALVE_POS2, ValveState.CLOSED, {})
hass.states.async_set(DEMO_VALVE2, STATE_UNAVAILABLE, {ATTR_ASSUMED_STATE: True})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.attributes.get(ATTR_ASSUMED_STATE) is True
# Unknown member with assumed_state=True -> group has assumed_state=True
hass.states.async_set(DEMO_VALVE2, STATE_UNKNOWN, {ATTR_ASSUMED_STATE: True})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert state.attributes.get(ATTR_ASSUMED_STATE) is True
# All members without assumed_state -> group doesn't have assumed_state in attributes
hass.states.async_set(DEMO_VALVE2, ValveState.CLOSED, {})
await hass.async_block_till_done()
state = hass.states.get(VALVE_GROUP)
assert ATTR_ASSUMED_STATE not in state.attributes
async def test_nested_group(hass: HomeAssistant) -> None:
"""Test nested valve group."""
await async_setup_component(
hass,
VALVE_DOMAIN,
{
VALVE_DOMAIN: [
{"platform": "demo"},
{
"platform": "group",
"entities": ["valve.bedroom_group"],
"name": "Nested Group",
},
{
"platform": "group",
CONF_ENTITIES: [DEMO_VALVE_POS1, DEMO_VALVE_POS2],
"name": "Bedroom Group",
},
]
},
)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
state = hass.states.get("valve.bedroom_group")
assert state is not None
assert state.state == ValveState.OPEN
assert state.attributes.get(ATTR_ENTITY_ID) == [DEMO_VALVE_POS1, DEMO_VALVE_POS2]
state = hass.states.get("valve.nested_group")
assert state is not None
assert state.state == ValveState.OPEN
assert state.attributes.get(ATTR_ENTITY_ID) == ["valve.bedroom_group"]
# Test controlling the nested group
async with asyncio.timeout(0.5):
await hass.services.async_call(
VALVE_DOMAIN,
SERVICE_CLOSE_VALVE,
{ATTR_ENTITY_ID: "valve.nested_group"},
blocking=True,
)
assert hass.states.get(DEMO_VALVE_POS1).state == ValveState.CLOSING
assert hass.states.get(DEMO_VALVE_POS2).state == ValveState.CLOSING
assert hass.states.get("valve.bedroom_group").state == ValveState.CLOSING
assert hass.states.get("valve.nested_group").state == ValveState.CLOSING

View File

@@ -191,6 +191,15 @@ async def test_remove_node_callback(
device = device_registry.async_get_device(identifiers={(DOMAIN, f"{HOMEE_ID}-3")})
assert device is not None
# Test device not removed when callback called with add=True
await mock_homee.add_nodes_listener.call_args_list[0][0][0](
mock_homee.nodes[2], add=True
)
await hass.async_block_till_done()
device = device_registry.async_get_device(identifiers={(DOMAIN, f"{HOMEE_ID}-3")})
assert device is not None
# Simulate removal of node with id 3 in homee
await mock_homee.add_nodes_listener.call_args_list[0][0][0](
mock_homee.nodes[2], add=False

View File

@@ -90,6 +90,56 @@ async def test_x_forwarded_for_with_trusted_proxy(
assert resp.status == HTTPStatus.OK
@pytest.mark.parametrize(
("trusted_proxies", "x_forwarded_for", "remote"),
[
(
["127.0.0.0/24", "1.1.1.1", "10.10.10.0/24"],
["10.10.10.10", "1.1.1.1"],
"10.10.10.10",
),
(
["127.0.0.0/24", "1.1.1.1"],
["123.123.123.123", "2.2.2.2", "1.1.1.1"],
"2.2.2.2",
),
(["127.0.0.0/24"], ["123.123.123.123", "2.2.2.2", "1.1.1.1"], "1.1.1.1"),
(["127.0.0.1", "1.1.1.1"], ["123.123.123.123", "1.1.1.1"], "123.123.123.123"),
(
["127.0.0.1", "1.1.1.1"],
["123.123.123.123", "2.2.2.2", "1.1.1.1"],
"2.2.2.2",
),
],
)
async def test_x_multiple_forwarded_for_with_trusted_proxy(
trusted_proxies, x_forwarded_for, remote, aiohttp_client: ClientSessionGenerator
) -> None:
"""Test that we get the IP from multiple forwarded for headers."""
async def handler(request):
url = mock_api_client.make_url("/")
assert request.host == f"{url.host}:{url.port}"
assert request.scheme == "http"
assert not request.secure
assert request.remote == remote
return web.Response()
app = web.Application()
app.router.add_get("/", handler)
async_setup_forwarded(
app, True, [ip_network(trusted_proxy) for trusted_proxy in trusted_proxies]
)
mock_api_client = await aiohttp_client(app)
resp = await mock_api_client.get(
"/", headers=[(X_FORWARDED_FOR, addr) for addr in x_forwarded_for]
)
assert resp.status == HTTPStatus.OK
async def test_x_forwarded_for_disabled_with_proxy(
aiohttp_client: ClientSessionGenerator, caplog: pytest.LogCaptureFixture
) -> None:
@@ -176,28 +226,6 @@ async def test_x_forwarded_for_with_malformed_header(
assert "Invalid IP address in X-Forwarded-For" in caplog.text
async def test_x_forwarded_for_with_multiple_headers(
aiohttp_client: ClientSessionGenerator, caplog: pytest.LogCaptureFixture
) -> None:
"""Test that we get a HTTP 400 bad request with multiple headers."""
app = web.Application()
app.router.add_get("/", mock_handler)
async_setup_forwarded(app, True, [ip_network("127.0.0.1")])
mock_api_client = await aiohttp_client(app)
resp = await mock_api_client.get(
"/",
headers=[
(X_FORWARDED_FOR, "222.222.222.222"),
(X_FORWARDED_FOR, "123.123.123.123"),
],
)
assert resp.status == HTTPStatus.BAD_REQUEST
assert "Too many headers for X-Forwarded-For" in caplog.text
@pytest.mark.parametrize(
("x_forwarded_for", "remote", "x_forwarded_proto", "secure"),
[
@@ -258,6 +286,65 @@ async def test_x_forwarded_proto_with_trusted_proxy(
assert resp.status == HTTPStatus.OK
@pytest.mark.parametrize(
("x_forwarded_for", "remote", "x_forwarded_proto", "secure"),
[
(
"10.10.10.10, 127.0.0.1, 127.0.0.2",
"10.10.10.10",
["https", "http", "http"],
True,
),
(
"10.10.10.10, 127.0.0.1, 127.0.0.2",
"10.10.10.10",
["http", "https", "https"],
False,
),
(
"255.255.255.255, 10.10.10.10, 127.0.0.1",
"10.10.10.10",
["http", "https", "http"],
True,
),
(
"255.255.255.255, 10.10.10.10, 127.0.0.1",
"10.10.10.10",
["https", "http", "https"],
False,
),
],
)
async def test_x_multiple_forwarded_proto_with_trusted_proxy(
x_forwarded_for,
remote,
x_forwarded_proto,
secure,
aiohttp_client: ClientSessionGenerator,
) -> None:
"""Test that we get the proto header if proxy is trusted."""
async def handler(request):
assert request.remote == remote
assert request.scheme == ("https" if secure else "http")
assert request.secure == secure
return web.Response()
app = web.Application()
app.router.add_get("/", handler)
async_setup_forwarded(app, True, [ip_network("127.0.0.0/24")])
mock_api_client = await aiohttp_client(app)
resp = await mock_api_client.get(
"/",
headers=[(X_FORWARDED_FOR, x_forwarded_for)]
+ [(X_FORWARDED_PROTO, proto) for proto in x_forwarded_proto],
)
assert resp.status == HTTPStatus.OK
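These cases also fix the pairing rule between the two headers: when X-Forwarded-Proto carries one value per X-Forwarded-For hop, the scheme at the same index as the selected client address applies, while a single proto value (as in the test further below) covers the whole chain. A small illustrative snippet of that index alignment, assumed from the expectations above rather than taken from the middleware:

forwarded_for = ["255.255.255.255", "10.10.10.10", "127.0.0.1"]
forwarded_proto = ["http", "https", "http"]

# 127.0.0.1 is a trusted proxy, so 10.10.10.10 becomes request.remote (index 1)
client_index = forwarded_for.index("10.10.10.10")
scheme = forwarded_proto[client_index]

assert scheme == "https"  # matches secure=True for this parametrized case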
async def test_x_forwarded_proto_with_trusted_proxy_multiple_for(
aiohttp_client: ClientSessionGenerator,
) -> None:
@@ -288,6 +375,38 @@ async def test_x_forwarded_proto_with_trusted_proxy_multiple_for(
assert resp.status == HTTPStatus.OK
async def test_x_forwarded_proto_with_trusted_proxy_multiple_for_2(
aiohttp_client: ClientSessionGenerator,
) -> None:
"""Test that we get the proto with 1 element in the proto, multiple in the for."""
async def handler(request):
url = mock_api_client.make_url("/")
assert request.host == f"{url.host}:{url.port}"
assert request.scheme == "https"
assert request.secure
assert request.remote == "255.255.255.255"
return web.Response()
app = web.Application()
app.router.add_get("/", handler)
async_setup_forwarded(app, True, [ip_network("127.0.0.0/24")])
mock_api_client = await aiohttp_client(app)
resp = await mock_api_client.get(
"/",
headers=[
(X_FORWARDED_FOR, "255.255.255.255"),
(X_FORWARDED_FOR, "127.0.0.1"),
(X_FORWARDED_FOR, "127.0.0.2"),
(X_FORWARDED_PROTO, "https"),
],
)
assert resp.status == HTTPStatus.OK
async def test_x_forwarded_proto_not_processed_without_for(
aiohttp_client: ClientSessionGenerator,
) -> None:
@@ -312,28 +431,6 @@ async def test_x_forwarded_proto_not_processed_without_for(
assert resp.status == HTTPStatus.OK
async def test_x_forwarded_proto_with_multiple_headers(
aiohttp_client: ClientSessionGenerator, caplog: pytest.LogCaptureFixture
) -> None:
"""Test that we get a HTTP 400 bad request with multiple headers."""
app = web.Application()
app.router.add_get("/", mock_handler)
async_setup_forwarded(app, True, [ip_network("127.0.0.1")])
mock_api_client = await aiohttp_client(app)
resp = await mock_api_client.get(
"/",
headers=[
(X_FORWARDED_FOR, "222.222.222.222"),
(X_FORWARDED_PROTO, "https"),
(X_FORWARDED_PROTO, "http"),
],
)
assert resp.status == HTTPStatus.BAD_REQUEST
assert "Too many headers for X-Forward-Proto" in caplog.text
@pytest.mark.parametrize(
"x_forwarded_proto",
["", ",", "https, , https", "https, https, "],
@@ -447,7 +544,7 @@ async def test_x_forwarded_host_not_processed_without_for(
async def test_x_forwarded_host_with_multiple_headers(
aiohttp_client: ClientSessionGenerator, caplog: pytest.LogCaptureFixture
) -> None:
"""Test that we get a HTTP 400 bad request with multiple headers."""
"""Test that we get a HTTP 200 OK with multiple headers."""
app = web.Application()
app.router.add_get("/", mock_handler)
async_setup_forwarded(app, True, [ip_network("127.0.0.1")])
@@ -462,8 +559,7 @@ async def test_x_forwarded_host_with_multiple_headers(
],
)
assert resp.status == HTTPStatus.BAD_REQUEST
assert "Too many headers for X-Forwarded-Host" in caplog.text
assert resp.status == HTTPStatus.OK
async def test_x_forwarded_host_with_empty_header(

View File

@@ -192,17 +192,11 @@ async def test_websocket_not_available(
await hass.async_block_till_done()
assert f"{error_msg} Trying to reconnect: Boom" in caplog.text
# Simulate a successful connection
caplog.clear()
await mock_called.wait()
mock_called.clear()
await hass.async_block_till_done()
assert mock.call_count == 2
assert "Trying to reconnect: Boom" not in caplog.text
# Simulate hass shutting down
await hass.async_stop()
assert mock.call_count == 2
assert mock.call_count == 1
async def test_device_info(

View File

@@ -2735,7 +2735,7 @@
'state': 'on',
})
# ---
# name: test_switch[switch.fake_profile_block_xbox_live-entry]
# name: test_switch[switch.fake_profile_block_xbox_network-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -2748,7 +2748,7 @@
'disabled_by': None,
'domain': 'switch',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'switch.fake_profile_block_xbox_live',
'entity_id': 'switch.fake_profile_block_xbox_network',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
@@ -2760,7 +2760,7 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Block Xbox Live',
'original_name': 'Block Xbox Network',
'platform': 'nextdns',
'previous_unique_id': None,
'suggested_object_id': None,
@@ -2770,13 +2770,13 @@
'unit_of_measurement': None,
})
# ---
# name: test_switch[switch.fake_profile_block_xbox_live-state]
# name: test_switch[switch.fake_profile_block_xbox_network-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Fake Profile Block Xbox Live',
'friendly_name': 'Fake Profile Block Xbox Network',
}),
'context': <ANY>,
'entity_id': 'switch.fake_profile_block_xbox_live',
'entity_id': 'switch.fake_profile_block_xbox_network',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,

View File

@@ -36,6 +36,7 @@ def mock_nintendo_device() -> Device:
mock.today_playing_time = 110
mock.bedtime_alarm = time(hour=19)
mock.set_bedtime_alarm.return_value = None
mock.update_max_daily_playtime.return_value = None
mock.forced_termination_mode = True
return mock

View File

@@ -0,0 +1,59 @@
# serializer version: 1
# name: test_number[number.home_assistant_test_max_screentime_today-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'max': 360,
'min': -1,
'mode': <NumberMode.BOX: 'box'>,
'step': 1,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'number',
'entity_category': None,
'entity_id': 'number.home_assistant_test_max_screentime_today',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Max screentime today',
'platform': 'nintendo_parental_controls',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': <NintendoParentalNumber.TODAY_MAX_SCREENTIME: 'today_max_screentime'>,
'unique_id': 'testdevid_today_max_screentime',
'unit_of_measurement': <UnitOfTime.MINUTES: 'min'>,
})
# ---
# name: test_number[number.home_assistant_test_max_screentime_today-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Home Assistant Test Max screentime today',
'max': 360,
'min': -1,
'mode': <NumberMode.BOX: 'box'>,
'step': 1,
'unit_of_measurement': <UnitOfTime.MINUTES: 'min'>,
}),
'context': <ANY>,
'entity_id': 'number.home_assistant_test_max_screentime_today',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '120',
})
# ---

View File

@@ -0,0 +1,58 @@
"""Test number platform for Nintendo Parental Controls."""
from unittest.mock import AsyncMock, patch
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.number import (
ATTR_VALUE,
DOMAIN as NUMBER_DOMAIN,
SERVICE_SET_VALUE,
)
from homeassistant.const import ATTR_ENTITY_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import setup_integration
from tests.common import MockConfigEntry, snapshot_platform
async def test_number(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_nintendo_client: AsyncMock,
entity_registry: er.EntityRegistry,
snapshot: SnapshotAssertion,
) -> None:
"""Test number platform."""
with patch(
"homeassistant.components.nintendo_parental_controls._PLATFORMS",
[Platform.NUMBER],
):
await setup_integration(hass, mock_config_entry)
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
async def test_set_number(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_nintendo_client: AsyncMock,
mock_nintendo_device: AsyncMock,
) -> None:
"""Test number platform service."""
with patch(
"homeassistant.components.nintendo_parental_controls._PLATFORMS",
[Platform.NUMBER],
):
await setup_integration(hass, mock_config_entry)
await hass.services.async_call(
NUMBER_DOMAIN,
SERVICE_SET_VALUE,
service_data={ATTR_VALUE: "120"},
target={ATTR_ENTITY_ID: "number.home_assistant_test_max_screentime_today"},
blocking=True,
)
assert len(mock_nintendo_device.update_max_daily_playtime.mock_calls) == 1

View File

@@ -51,7 +51,7 @@
'state': 'off',
})
# ---
# name: test_protection_window_recalculation[after-protetction-state]
# name: test_protection_window_recalculation[after-protection-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'end_time': datetime.datetime(2018, 7, 30, 16, 47, 49, 750000, tzinfo=zoneinfo.ZoneInfo(key='America/Regina')),
@@ -68,7 +68,7 @@
'state': 'off',
})
# ---
# name: test_protection_window_recalculation[before-protetction-state]
# name: test_protection_window_recalculation[before-protection-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'end_time': datetime.datetime(2018, 7, 30, 16, 47, 49, 750000, tzinfo=zoneinfo.ZoneInfo(key='America/Regina')),
@@ -85,7 +85,7 @@
'state': 'off',
})
# ---
# name: test_protection_window_recalculation[during-protetction-state]
# name: test_protection_window_recalculation[during-protection-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'end_time': datetime.datetime(2018, 7, 30, 16, 47, 49, 750000, tzinfo=zoneinfo.ZoneInfo(key='America/Regina')),

View File

@@ -3,12 +3,14 @@
from unittest.mock import patch
from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.homeassistant import (
DOMAIN as HOMEASSISTANT_DOMAIN,
SERVICE_UPDATE_ENTITY,
)
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_ENTITY_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
@@ -32,7 +34,7 @@ async def test_binary_sensors(
await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id)
async def test_protetction_window_update(
async def test_protection_window_update(
hass: HomeAssistant,
set_time_zone,
config,
@@ -43,7 +45,7 @@ async def test_protetction_window_update(
device_registry: dr.DeviceRegistry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test that updating the protetection window makes an extra API call."""
"""Test that updating the protection window makes an extra API call."""
assert await async_setup_component(hass, HOMEASSISTANT_DOMAIN, {})
@@ -59,6 +61,55 @@ async def test_protetction_window_update(
assert client.uv_protection_window.call_count == 2
@pytest.mark.parametrize(
"data_protection_window",
[{"result": {"from_time": None, "from_uv": 0, "to_time": None, "to_uv": 0}}],
)
async def test_protection_window_null_value_response(
hass: HomeAssistant,
set_time_zone,
config,
client,
config_entry,
setup_config_entry,
) -> None:
"""Test that null values in the protection window clears the state."""
entity_id = "binary_sensor.openuv_protection_window"
hass.states.async_set(entity_id, "on", {})
assert await async_setup_component(hass, HOMEASSISTANT_DOMAIN, {})
await hass.services.async_call(
HOMEASSISTANT_DOMAIN,
SERVICE_UPDATE_ENTITY,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
state = hass.states.get(entity_id)
assert state.state == "unknown"
@pytest.mark.parametrize(
"data_protection_window",
[{"result": {"error": "missing expected keys"}}],
)
async def test_protection_window_invalid_response(
hass: HomeAssistant,
set_time_zone,
config,
client,
config_entry,
mock_pyopenuv,
) -> None:
"""Test that missing values in the protection window generate an error."""
assert await hass.config_entries.async_setup(config_entry.entry_id) is False
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.SETUP_RETRY
async def test_protection_window_recalculation(
hass: HomeAssistant,
config,
@@ -71,7 +122,7 @@ async def test_protection_window_recalculation(
device_registry: dr.DeviceRegistry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test that protetction window updates automatically without extra API calls."""
"""Test that protection window updates automatically without extra API calls."""
freezer.move_to("2018-07-30T06:17:59-06:00")
@@ -81,9 +132,9 @@ async def test_protection_window_recalculation(
entity_id = "binary_sensor.openuv_protection_window"
state = hass.states.get(entity_id)
assert state.state == "off"
assert state == snapshot(name="before-protetction-state")
assert state == snapshot(name="before-protection-state")
# move to when the protetction window starts
# move to when the protection window starts
freezer.move_to("2018-07-30T09:17:59-06:00")
async_fire_time_changed(hass)
await hass.async_block_till_done()
@@ -91,9 +142,9 @@ async def test_protection_window_recalculation(
entity_id = "binary_sensor.openuv_protection_window"
state = hass.states.get(entity_id)
assert state.state == "on"
assert state == snapshot(name="during-protetction-state")
assert state == snapshot(name="during-protection-state")
# move to when the protetction window ends
# move to when the protection window ends
freezer.move_to("2018-07-30T16:47:59-06:00")
async_fire_time_changed(hass)
await hass.async_block_till_done()
@@ -101,6 +152,6 @@ async def test_protection_window_recalculation(
entity_id = "binary_sensor.openuv_protection_window"
state = hass.states.get(entity_id)
assert state.state == "off"
assert state == snapshot(name="after-protetction-state")
assert state == snapshot(name="after-protection-state")
assert client.uv_protection_window.call_count == 1

View File

@@ -1,13 +1,26 @@
"""Tests for the Portainer integration."""
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from tests.common import MockConfigEntry
async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None:
"""Fixture for setting up the component."""
async def setup_integration(
hass: HomeAssistant,
config_entry: MockConfigEntry,
) -> None:
"""Set up the Portainer integration for testing and enable all entities."""
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
entity_registry = er.async_get(hass)
for entry in er.async_entries_for_config_entry(
entity_registry, config_entry.entry_id
):
if entry.disabled_by is not None:
entity_registry.async_update_entity(entry.entity_id, disabled_by=None)
await hass.async_block_till_done()

View File

@@ -4,13 +4,18 @@ from collections.abc import Generator
from unittest.mock import AsyncMock, patch
from pyportainer.models.docker import DockerContainer
from pyportainer.models.docker_inspect import DockerInfo, DockerVersion
from pyportainer.models.portainer import Endpoint
import pytest
from homeassistant.components.portainer.const import DOMAIN
from homeassistant.const import CONF_API_TOKEN, CONF_URL, CONF_VERIFY_SSL
from tests.common import MockConfigEntry, load_json_array_fixture
from tests.common import (
MockConfigEntry,
load_json_array_fixture,
load_json_value_fixture,
)
MOCK_TEST_CONFIG = {
CONF_URL: "https://127.0.0.1:9000/",
@@ -49,6 +54,13 @@ def mock_portainer_client() -> Generator[AsyncMock]:
DockerContainer.from_dict(container)
for container in load_json_array_fixture("containers.json", DOMAIN)
]
client.docker_info.return_value = DockerInfo.from_dict(
load_json_value_fixture("docker_info.json", DOMAIN)
)
client.docker_version.return_value = DockerVersion.from_dict(
load_json_value_fixture("docker_version.json", DOMAIN)
)
client.restart_container = AsyncMock(return_value=None)
yield client

View File

@@ -0,0 +1,106 @@
{
"ID": "7TRN:IPZB:QYBB:VPBQ:UMPP:KARE:6ZNR:XE6T:7EWV:PKF4:ZOJD:TPYS",
"Containers": 14,
"ContainersRunning": 3,
"ContainersPaused": 1,
"ContainersStopped": 10,
"Images": 508,
"Driver": "overlay2",
"DriverStatus": [
["Backing Filesystem", "extfs"],
["Supports d_type", "true"],
["Native Overlay Diff", "true"]
],
"DockerRootDir": "/var/lib/docker",
"Plugins": {
"Volume": [],
"Network": [],
"Authorization": [],
"Log": []
},
"MemoryLimit": true,
"SwapLimit": true,
"KernelMemoryTCP": true,
"CpuCfsPeriod": true,
"CpuCfsQuota": true,
"CPUShares": true,
"CPUSet": true,
"PidsLimit": true,
"OomKillDisable": true,
"IPv4Forwarding": true,
"BridgeNfIptables": true,
"BridgeNfIp6tables": true,
"Debug": true,
"NFd": 64,
"NGoroutines": 174,
"SystemTime": "2017-08-08T20:28:29.06202363Z",
"LoggingDriver": "json-file",
"CgroupDriver": "cgroupfs",
"CgroupVersion": "1",
"NEventsListener": 30,
"KernelVersion": "4.9.38-moby",
"OperatingSystem": "Alpine Linux v3.5",
"OSVersion": "16.04",
"OSType": "linux",
"Architecture": "x86_64",
"NCPU": 4,
"MemTotal": 2095882240,
"IndexServerAddress": "https://index.docker.io/v1/",
"RegistryConfig": {
"AllowNondistributableArtifactsCIDRs": [],
"AllowNondistributableArtifactsHostnames": [],
"InsecureRegistryCIDRs": [],
"IndexConfigs": {},
"Mirrors": []
},
"GenericResources": [{}, {}, {}],
"HttpProxy": "http://xxxxx:xxxxx@proxy.corp.example.com:8080",
"HttpsProxy": "https://xxxxx:xxxxx@proxy.corp.example.com:4443",
"NoProxy": "*.local, 169.254/16",
"Name": "node5.corp.example.com",
"Labels": ["storage=ssd", "production"],
"ExperimentalBuild": true,
"ServerVersion": "24.0.2",
"Runtimes": {
"runc": {},
"runc-master": {},
"custom": {}
},
"DefaultRuntime": "runc",
"Swarm": {
"NodeID": "k67qz4598weg5unwwffg6z1m1",
"NodeAddr": "10.0.0.46",
"LocalNodeState": "active",
"ControlAvailable": true,
"Error": "",
"RemoteManagers": [],
"Nodes": 4,
"Managers": 3,
"Cluster": {}
},
"LiveRestoreEnabled": false,
"Isolation": "default",
"InitBinary": "docker-init",
"ContainerdCommit": {
"ID": "cfb82a876ecc11b5ca0977d1733adbe58599088a",
"Expected": "2d41c047c83e09a6d61d464906feb2a2f3c52aa4"
},
"RuncCommit": {
"ID": "cfb82a876ecc11b5ca0977d1733adbe58599088a",
"Expected": "2d41c047c83e09a6d61d464906feb2a2f3c52aa4"
},
"InitCommit": {
"ID": "cfb82a876ecc11b5ca0977d1733adbe58599088a",
"Expected": "2d41c047c83e09a6d61d464906feb2a2f3c52aa4"
},
"SecurityOptions": [
"name=apparmor",
"name=seccomp,profile=default",
"name=selinux",
"name=userns",
"name=rootless"
],
"ProductLicense": "Community Engine",
"DefaultAddressPools": [{}],
"Warnings": ["WARNING: No memory limit support"]
}

View File

@@ -0,0 +1,16 @@
{
"Platform": {
"Name": "string"
},
"Components": [{}],
"Version": "19.03.12",
"ApiVersion": "1.40",
"MinAPIVersion": "1.12",
"GitCommit": "48a66213fe",
"GoVersion": "go1.13.14",
"Os": "linux",
"Arch": "amd64",
"KernelVersion": "4.19.76-linuxkit",
"Experimental": true,
"BuildTime": "2020-06-22T15:49:27.000000000+00:00"
}

View File

@@ -95,6 +95,656 @@
'state': 'docker.io/library/ubuntu:latest',
})
# ---
# name: test_all_entities[sensor.my_environment_api_version-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.my_environment_api_version',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'API version',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'api_version',
'unique_id': 'portainer_test_entry_123_1_api_version',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.my_environment_api_version-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'my-environment API version',
}),
'context': <ANY>,
'entity_id': 'sensor.my_environment_api_version',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '1.40',
})
# ---
# name: test_all_entities[sensor.my_environment_architecture-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.my_environment_architecture',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Architecture',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'architecture',
'unique_id': 'portainer_test_entry_123_1_architecture',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.my_environment_architecture-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'my-environment Architecture',
}),
'context': <ANY>,
'entity_id': 'sensor.my_environment_architecture',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'x86_64',
})
# ---
# name: test_all_entities[sensor.my_environment_container_count-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.my_environment_container_count',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Container count',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'containers_count',
'unique_id': 'portainer_test_entry_123_1_containers_count',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.my_environment_container_count-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'my-environment Container count',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.my_environment_container_count',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '14',
})
# ---
# name: test_all_entities[sensor.my_environment_containers_paused-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.my_environment_containers_paused',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Containers paused',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'containers_paused',
'unique_id': 'portainer_test_entry_123_1_containers_paused',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.my_environment_containers_paused-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'my-environment Containers paused',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.my_environment_containers_paused',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '1',
})
# ---
# name: test_all_entities[sensor.my_environment_containers_running-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.my_environment_containers_running',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Containers running',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'containers_running',
'unique_id': 'portainer_test_entry_123_1_containers_running',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.my_environment_containers_running-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'my-environment Containers running',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.my_environment_containers_running',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '3',
})
# ---
# name: test_all_entities[sensor.my_environment_containers_stopped-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.my_environment_containers_stopped',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Containers stopped',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'containers_stopped',
'unique_id': 'portainer_test_entry_123_1_containers_stopped',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.my_environment_containers_stopped-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'my-environment Containers stopped',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.my_environment_containers_stopped',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '10',
})
# ---
# name: test_all_entities[sensor.my_environment_docker_version-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.my_environment_docker_version',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Docker version',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'docker_version',
'unique_id': 'portainer_test_entry_123_1_docker_version',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.my_environment_docker_version-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'my-environment Docker version',
}),
'context': <ANY>,
'entity_id': 'sensor.my_environment_docker_version',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '24.0.2',
})
# ---
# name: test_all_entities[sensor.my_environment_image_count-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.my_environment_image_count',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Image count',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'images_count',
'unique_id': 'portainer_test_entry_123_1_images_count',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.my_environment_image_count-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'my-environment Image count',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.my_environment_image_count',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '508',
})
# ---
# name: test_all_entities[sensor.my_environment_kernel_version-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.my_environment_kernel_version',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Kernel version',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'kernel_version',
'unique_id': 'portainer_test_entry_123_1_kernel_version',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.my_environment_kernel_version-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'my-environment Kernel version',
}),
'context': <ANY>,
'entity_id': 'sensor.my_environment_kernel_version',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '4.19.76-linuxkit',
})
# ---
# name: test_all_entities[sensor.my_environment_operating_system-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.my_environment_operating_system',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Operating system',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'operating_system',
'unique_id': 'portainer_test_entry_123_1_operating_system',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.my_environment_operating_system-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'my-environment Operating system',
}),
'context': <ANY>,
'entity_id': 'sensor.my_environment_operating_system',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'linux',
})
# ---
# name: test_all_entities[sensor.my_environment_operating_system_version-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.my_environment_operating_system_version',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Operating system version',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'operating_system_version',
'unique_id': 'portainer_test_entry_123_1_operating_system_version',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.my_environment_operating_system_version-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'my-environment Operating system version',
}),
'context': <ANY>,
'entity_id': 'sensor.my_environment_operating_system_version',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '16.04',
})
# ---
# name: test_all_entities[sensor.my_environment_total_cpu-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.my_environment_total_cpu',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Total CPU',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'cpu_total',
'unique_id': 'portainer_test_entry_123_1_cpu_total',
'unit_of_measurement': None,
})
# ---
# name: test_all_entities[sensor.my_environment_total_cpu-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'my-environment Total CPU',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.my_environment_total_cpu',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '4',
})
# ---
# name: test_all_entities[sensor.my_environment_total_memory-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.my_environment_total_memory',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
'sensor': dict({
'suggested_display_precision': 0,
}),
}),
'original_device_class': <SensorDeviceClass.DATA_SIZE: 'data_size'>,
'original_icon': None,
'original_name': 'Total memory',
'platform': 'portainer',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'memory_total',
'unique_id': 'portainer_test_entry_123_1_memory_total',
'unit_of_measurement': <UnitOfInformation.BYTES: 'B'>,
})
# ---
# name: test_all_entities[sensor.my_environment_total_memory-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'data_size',
'friendly_name': 'my-environment Total memory',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': <UnitOfInformation.BYTES: 'B'>,
}),
'context': <ANY>,
'entity_id': 'sensor.my_environment_total_memory',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '2095882240',
})
# ---
# name: test_all_entities[sensor.practical_morse_image-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -23,6 +23,11 @@ from . import setup_integration
from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
@pytest.fixture(autouse=True)
def enable_all_entities(entity_registry_enabled_by_default: None) -> None:
"""Make sure all entities are enabled."""
async def test_all_entities(
hass: HomeAssistant,
snapshot: SnapshotAssertion,

View File

@@ -14,6 +14,11 @@ from . import setup_integration
from tests.common import MockConfigEntry, snapshot_platform
@pytest.fixture(autouse=True)
def enable_all_entities(entity_registry_enabled_by_default: None) -> None:
"""Make sure all entities are enabled."""
@pytest.mark.usefixtures("mock_portainer_client")
async def test_all_entities(
hass: HomeAssistant,
@@ -28,5 +33,8 @@ async def test_all_entities(
):
await setup_integration(hass, mock_config_entry)
await snapshot_platform(
hass, entity_registry, snapshot, mock_config_entry.entry_id
hass,
entity_registry,
snapshot,
mock_config_entry.entry_id,
)

View File

@@ -307,3 +307,98 @@ async def test_web_login_errors(
CONF_PASSWORD: PASSWORD,
}
assert len(mock_setup_entry.mock_calls) == 1
async def test_reconfigure_flow_api_key(
recorder_mock: Recorder,
hass: HomeAssistant,
solaredge_api: Mock,
mock_setup_entry: AsyncMock,
) -> None:
"""Test reconfigure flow with API key."""
entry = MockConfigEntry(
domain=DOMAIN,
title=NAME,
data={CONF_SITE_ID: SITE_ID, CONF_API_KEY: "old_api_key"},
)
entry.add_to_hass(hass)
result = await entry.start_reconfigure_flow(hass)
assert result.get("type") is FlowResultType.FORM
assert result.get("step_id") == "reconfigure"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_SECTION_API_AUTH: {CONF_API_KEY: API_KEY},
},
)
assert result.get("type") is FlowResultType.ABORT
assert result.get("reason") == "reconfigure_successful"
await hass.async_block_till_done()
assert entry.title == NAME
assert entry.data[CONF_SITE_ID] == SITE_ID
assert entry.data[CONF_API_KEY] == API_KEY
assert mock_setup_entry.call_count == 1
async def test_reconfigure_flow_web_login_and_errors(
recorder_mock: Recorder,
hass: HomeAssistant,
solaredge_web_api: AsyncMock,
mock_setup_entry: AsyncMock,
) -> None:
"""Test reconfigure flow with web login and error handling."""
entry = MockConfigEntry(
domain=DOMAIN,
title=NAME,
data={CONF_SITE_ID: SITE_ID, CONF_API_KEY: "old_api_key"},
)
entry.add_to_hass(hass)
result = await entry.start_reconfigure_flow(hass)
assert result.get("type") is FlowResultType.FORM
assert result.get("step_id") == "reconfigure"
# Test error
solaredge_web_api.async_get_equipment.side_effect = ClientResponseError(
None, None, status=401
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_SECTION_WEB_AUTH: {
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
},
},
)
assert result.get("type") is FlowResultType.FORM
assert result.get("errors") == {"base": "invalid_auth"}
# Test recovery
solaredge_web_api.async_get_equipment.side_effect = None
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_SECTION_WEB_AUTH: {
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
},
},
)
assert result.get("type") is FlowResultType.ABORT
assert result.get("reason") == "reconfigure_successful"
await hass.async_block_till_done()
assert entry.title == NAME
assert entry.data == {
CONF_SITE_ID: SITE_ID,
CONF_USERNAME: USERNAME,
CONF_PASSWORD: PASSWORD,
}
assert mock_setup_entry.call_count == 1

View File

@@ -4,9 +4,11 @@ from __future__ import annotations
from unittest.mock import patch
import pytest
from syrupy.assertion import SnapshotAssertion
from tuya_sharing import CustomerDevice, Manager
from homeassistant.components.alarm_control_panel import AlarmControlPanelState
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
@@ -29,3 +31,56 @@ async def test_platform_setup_and_discovery(
await initialize_entry(hass, mock_manager, mock_config_entry, mock_devices)
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
@pytest.mark.parametrize(
"mock_device_code",
["mal_gyitctrjj1kefxp2"],
)
async def test_alarm_state_triggered(
hass: HomeAssistant,
mock_manager: Manager,
mock_config_entry: MockConfigEntry,
mock_device: CustomerDevice,
) -> None:
"""Test alarm state returns TRIGGERED for non-battery alarms."""
entity_id = "alarm_control_panel.multifunction_alarm"
# Set up alarm state without battery warning
mock_device.status["master_state"] = "alarm"
mock_device.status["alarm_msg"] = (
"AFQAZQBzAHQAIABTAGUAbgBzAG8Acg==" # "Test Sensor" in UTF-16BE
)
await initialize_entry(hass, mock_manager, mock_config_entry, mock_device)
state = hass.states.get(entity_id)
assert state is not None, f"{entity_id} does not exist"
assert state.state == AlarmControlPanelState.TRIGGERED
@pytest.mark.parametrize(
"mock_device_code",
["mal_gyitctrjj1kefxp2"],
)
async def test_alarm_state_battery_warning(
hass: HomeAssistant,
mock_manager: Manager,
mock_config_entry: MockConfigEntry,
mock_device: CustomerDevice,
) -> None:
"""Test alarm state ignores battery warnings."""
entity_id = "alarm_control_panel.multifunction_alarm"
# Set up alarm state with battery warning
mock_device.status["master_state"] = "alarm"
mock_device.status["alarm_msg"] = (
"AFMAZQBuAHMAbwByACAATABvAHcAIABCAGEAdAB0AGUAcgB5ACAAVABlAHMAdAAgAFMAZQBuAHMAbwBy" # "Sensor Low Battery Test Sensor" in UTF-16BE
)
await initialize_entry(hass, mock_manager, mock_config_entry, mock_device)
state = hass.states.get(entity_id)
assert state is not None, f"{entity_id} does not exist"
# Should not be triggered for battery warnings
assert state.state != AlarmControlPanelState.TRIGGERED

View File

@@ -670,7 +670,7 @@
'friendly_name': 'SmartTowerFan',
'mode': 'normal',
'oscillating': True,
'percentage': None,
'percentage': 0,
'percentage_step': 8.333333333333334,
'preset_mode': 'normal',
'preset_modes': list([

View File

@@ -38,7 +38,7 @@ async def setup_credentials(hass: HomeAssistant) -> None:
def mock_oauth2_implementation() -> Generator[AsyncMock]:
"""Mock config entry oauth2 implementation."""
with patch(
"homeassistant.components.xbox.config_entry_oauth2_flow.async_get_config_entry_implementation",
"homeassistant.components.xbox.coordinator.config_entry_oauth2_flow.async_get_config_entry_implementation",
return_value=AsyncMock(),
) as mock_client:
client = mock_client.return_value
@@ -89,7 +89,7 @@ def mock_signed_session() -> Generator[AsyncMock]:
with (
patch(
"homeassistant.components.xbox.SignedSession", autospec=True
"homeassistant.components.xbox.coordinator.SignedSession", autospec=True
) as mock_client,
patch(
"homeassistant.components.xbox.config_flow.SignedSession", new=mock_client
@@ -106,7 +106,7 @@ def mock_xbox_live_client(signed_session) -> Generator[AsyncMock]:
with (
patch(
"homeassistant.components.xbox.XboxLiveClient", autospec=True
"homeassistant.components.xbox.coordinator.XboxLiveClient", autospec=True
) as mock_client,
patch(
"homeassistant.components.xbox.config_flow.XboxLiveClient", new=mock_client

View File

@@ -13,7 +13,7 @@
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.erics273',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -24,12 +24,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'erics273',
'original_name': None,
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxBinarySensor.ONLINE: 'online'>,
'unique_id': '2533274913657542_online',
'unit_of_measurement': None,
})
@@ -62,7 +62,7 @@
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.erics273_in_game',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -73,12 +73,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'erics273 In Game',
'original_name': 'In game',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxBinarySensor.IN_GAME: 'in_game'>,
'unique_id': '2533274913657542_in_game',
'unit_of_measurement': None,
})
@@ -86,8 +86,7 @@
# name: test_binary_sensors[binary_sensor.erics273_in_game-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=rwljod2fPqLqGP3DBV9F_yK9iuxAt3_MH6tcOnQXTc8LY1LO8JeulzCEFHaqqItKdg9oJ84qjO.VNwvUWuq_iR5iTyx1gQsqHSvWLbqIrRI-&background=0xababab&format=png',
'friendly_name': 'erics273 In Game',
'friendly_name': 'erics273 In game',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.erics273_in_game',
@@ -111,7 +110,7 @@
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.erics273_in_multiplayer',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -122,12 +121,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'erics273 In Multiplayer',
'original_name': 'In multiplayer',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxBinarySensor.IN_MULTIPLAYER: 'in_multiplayer'>,
'unique_id': '2533274913657542_in_multiplayer',
'unit_of_measurement': None,
})
@@ -135,8 +134,7 @@
# name: test_binary_sensors[binary_sensor.erics273_in_multiplayer-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=rwljod2fPqLqGP3DBV9F_yK9iuxAt3_MH6tcOnQXTc8LY1LO8JeulzCEFHaqqItKdg9oJ84qjO.VNwvUWuq_iR5iTyx1gQsqHSvWLbqIrRI-&background=0xababab&format=png',
'friendly_name': 'erics273 In Multiplayer',
'friendly_name': 'erics273 In multiplayer',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.erics273_in_multiplayer',
@@ -160,7 +158,7 @@
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.erics273_in_party',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -171,12 +169,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'erics273 In Party',
'original_name': 'In party',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxBinarySensor.IN_PARTY: 'in_party'>,
'unique_id': '2533274913657542_in_party',
'unit_of_measurement': None,
})
@@ -184,8 +182,7 @@
# name: test_binary_sensors[binary_sensor.erics273_in_party-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=rwljod2fPqLqGP3DBV9F_yK9iuxAt3_MH6tcOnQXTc8LY1LO8JeulzCEFHaqqItKdg9oJ84qjO.VNwvUWuq_iR5iTyx1gQsqHSvWLbqIrRI-&background=0xababab&format=png',
'friendly_name': 'erics273 In Party',
'friendly_name': 'erics273 In party',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.erics273_in_party',
@@ -209,7 +206,7 @@
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.gsr_ae',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -220,12 +217,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'GSR Ae',
'original_name': None,
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxBinarySensor.ONLINE: 'online'>,
'unique_id': '271958441785640_online',
'unit_of_measurement': None,
})
@@ -258,7 +255,7 @@
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.gsr_ae_in_game',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -269,12 +266,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'GSR Ae In Game',
'original_name': 'In game',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxBinarySensor.IN_GAME: 'in_game'>,
'unique_id': '271958441785640_in_game',
'unit_of_measurement': None,
})
@@ -282,8 +279,7 @@
# name: test_binary_sensors[binary_sensor.gsr_ae_in_game-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=wHwbXKif8cus8csoZ03RW_ES.ojiJijNBGRVUbTnZKsoCCCkjlsEJrrMqDkYqs3M0aLOK2kxE9mbLm9M2.R0stAQYoDsGCDJxqDzG9WF3oa4rOCjEK7DbZXdBmBWnMrfErA3M_Q4y_mUTEQLqSAEeYFGlGeCXYsccnQMvEecxRg-&format=png',
'friendly_name': 'GSR Ae In Game',
'friendly_name': 'GSR Ae In game',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.gsr_ae_in_game',
@@ -307,7 +303,7 @@
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.gsr_ae_in_multiplayer',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -318,12 +314,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'GSR Ae In Multiplayer',
'original_name': 'In multiplayer',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxBinarySensor.IN_MULTIPLAYER: 'in_multiplayer'>,
'unique_id': '271958441785640_in_multiplayer',
'unit_of_measurement': None,
})
@@ -331,8 +327,7 @@
# name: test_binary_sensors[binary_sensor.gsr_ae_in_multiplayer-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=wHwbXKif8cus8csoZ03RW_ES.ojiJijNBGRVUbTnZKsoCCCkjlsEJrrMqDkYqs3M0aLOK2kxE9mbLm9M2.R0stAQYoDsGCDJxqDzG9WF3oa4rOCjEK7DbZXdBmBWnMrfErA3M_Q4y_mUTEQLqSAEeYFGlGeCXYsccnQMvEecxRg-&format=png',
'friendly_name': 'GSR Ae In Multiplayer',
'friendly_name': 'GSR Ae In multiplayer',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.gsr_ae_in_multiplayer',
@@ -356,7 +351,7 @@
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.gsr_ae_in_party',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -367,12 +362,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'GSR Ae In Party',
'original_name': 'In party',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxBinarySensor.IN_PARTY: 'in_party'>,
'unique_id': '271958441785640_in_party',
'unit_of_measurement': None,
})
@@ -380,8 +375,7 @@
# name: test_binary_sensors[binary_sensor.gsr_ae_in_party-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=wHwbXKif8cus8csoZ03RW_ES.ojiJijNBGRVUbTnZKsoCCCkjlsEJrrMqDkYqs3M0aLOK2kxE9mbLm9M2.R0stAQYoDsGCDJxqDzG9WF3oa4rOCjEK7DbZXdBmBWnMrfErA3M_Q4y_mUTEQLqSAEeYFGlGeCXYsccnQMvEecxRg-&format=png',
'friendly_name': 'GSR Ae In Party',
'friendly_name': 'GSR Ae In party',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.gsr_ae_in_party',
@@ -405,7 +399,7 @@
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.ikken_hissatsuu',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -416,12 +410,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Ikken Hissatsuu',
'original_name': None,
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxBinarySensor.ONLINE: 'online'>,
'unique_id': '2533274838782903_online',
'unit_of_measurement': None,
})
@@ -454,7 +448,7 @@
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.ikken_hissatsuu_in_game',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -465,12 +459,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Ikken Hissatsuu In Game',
'original_name': 'In game',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxBinarySensor.IN_GAME: 'in_game'>,
'unique_id': '2533274838782903_in_game',
'unit_of_measurement': None,
})
@@ -478,8 +472,7 @@
# name: test_binary_sensors[binary_sensor.ikken_hissatsuu_in_game-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=7OTVnZUMVj4OV2zUUGecWvn3U00nQQLfK7_kwpANogj9vJpb.t4ZQMMLIWOuBZBBZs5MjD7okwh5Zwnit1SAtO3OAsFXxJc1ALIbaVoRo7gsiun9FdcaTpzkM60nqzT8ip1659eQpB1SLyupscP.ec_wAGvXwkhCcTKCNHQMrxg-&format=png',
'friendly_name': 'Ikken Hissatsuu In Game',
'friendly_name': 'Ikken Hissatsuu In game',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.ikken_hissatsuu_in_game',
@@ -503,7 +496,7 @@
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.ikken_hissatsuu_in_multiplayer',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -514,12 +507,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Ikken Hissatsuu In Multiplayer',
'original_name': 'In multiplayer',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxBinarySensor.IN_MULTIPLAYER: 'in_multiplayer'>,
'unique_id': '2533274838782903_in_multiplayer',
'unit_of_measurement': None,
})
@@ -527,8 +520,7 @@
# name: test_binary_sensors[binary_sensor.ikken_hissatsuu_in_multiplayer-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=7OTVnZUMVj4OV2zUUGecWvn3U00nQQLfK7_kwpANogj9vJpb.t4ZQMMLIWOuBZBBZs5MjD7okwh5Zwnit1SAtO3OAsFXxJc1ALIbaVoRo7gsiun9FdcaTpzkM60nqzT8ip1659eQpB1SLyupscP.ec_wAGvXwkhCcTKCNHQMrxg-&format=png',
'friendly_name': 'Ikken Hissatsuu In Multiplayer',
'friendly_name': 'Ikken Hissatsuu In multiplayer',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.ikken_hissatsuu_in_multiplayer',
@@ -552,7 +544,7 @@
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.ikken_hissatsuu_in_party',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -563,12 +555,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Ikken Hissatsuu In Party',
'original_name': 'In party',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxBinarySensor.IN_PARTY: 'in_party'>,
'unique_id': '2533274838782903_in_party',
'unit_of_measurement': None,
})
@@ -576,8 +568,7 @@
# name: test_binary_sensors[binary_sensor.ikken_hissatsuu_in_party-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=7OTVnZUMVj4OV2zUUGecWvn3U00nQQLfK7_kwpANogj9vJpb.t4ZQMMLIWOuBZBBZs5MjD7okwh5Zwnit1SAtO3OAsFXxJc1ALIbaVoRo7gsiun9FdcaTpzkM60nqzT8ip1659eQpB1SLyupscP.ec_wAGvXwkhCcTKCNHQMrxg-&format=png',
'friendly_name': 'Ikken Hissatsuu In Party',
'friendly_name': 'Ikken Hissatsuu In party',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.ikken_hissatsuu_in_party',

View File

@@ -13,7 +13,7 @@
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.erics273_account_tier',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -24,12 +24,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'erics273 Account Tier',
'original_name': 'Account tier',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxSensor.ACCOUNT_TIER: 'account_tier'>,
'unique_id': '2533274913657542_account_tier',
'unit_of_measurement': None,
})
@@ -37,8 +37,7 @@
# name: test_sensors[sensor.erics273_account_tier-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=rwljod2fPqLqGP3DBV9F_yK9iuxAt3_MH6tcOnQXTc8LY1LO8JeulzCEFHaqqItKdg9oJ84qjO.VNwvUWuq_iR5iTyx1gQsqHSvWLbqIrRI-&background=0xababab&format=png',
'friendly_name': 'erics273 Account Tier',
'friendly_name': 'erics273 Account tier',
}),
'context': <ANY>,
'entity_id': 'sensor.erics273_account_tier',
@@ -48,7 +47,7 @@
'state': 'Silver',
})
# ---
# name: test_sensors[sensor.erics273_gamer_score-entry]
# name: test_sensors[sensor.erics273_gamerscore-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -61,8 +60,8 @@
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.erics273_gamer_score',
'has_entity_name': False,
'entity_id': 'sensor.erics273_gamerscore',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -73,24 +72,24 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'erics273 Gamer Score',
'original_name': 'Gamerscore',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxSensor.GAMER_SCORE: 'gamer_score'>,
'unique_id': '2533274913657542_gamer_score',
'unit_of_measurement': None,
'unit_of_measurement': 'points',
})
# ---
# name: test_sensors[sensor.erics273_gamer_score-state]
# name: test_sensors[sensor.erics273_gamerscore-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=rwljod2fPqLqGP3DBV9F_yK9iuxAt3_MH6tcOnQXTc8LY1LO8JeulzCEFHaqqItKdg9oJ84qjO.VNwvUWuq_iR5iTyx1gQsqHSvWLbqIrRI-&background=0xababab&format=png',
'friendly_name': 'erics273 Gamer Score',
'friendly_name': 'erics273 Gamerscore',
'unit_of_measurement': 'points',
}),
'context': <ANY>,
'entity_id': 'sensor.erics273_gamer_score',
'entity_id': 'sensor.erics273_gamerscore',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -111,7 +110,7 @@
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.erics273_gold_tenure',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -122,12 +121,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'erics273 Gold Tenure',
'original_name': 'Gold tenure',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxSensor.GOLD_TENURE: 'gold_tenure'>,
'unique_id': '2533274913657542_gold_tenure',
'unit_of_measurement': None,
})
@@ -135,8 +134,7 @@
# name: test_sensors[sensor.erics273_gold_tenure-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=rwljod2fPqLqGP3DBV9F_yK9iuxAt3_MH6tcOnQXTc8LY1LO8JeulzCEFHaqqItKdg9oJ84qjO.VNwvUWuq_iR5iTyx1gQsqHSvWLbqIrRI-&background=0xababab&format=png',
'friendly_name': 'erics273 Gold Tenure',
'friendly_name': 'erics273 Gold tenure',
}),
'context': <ANY>,
'entity_id': 'sensor.erics273_gold_tenure',
@@ -160,7 +158,7 @@
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.erics273_status',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -171,12 +169,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'erics273 Status',
'original_name': 'Status',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxSensor.STATUS: 'status'>,
'unique_id': '2533274913657542_status',
'unit_of_measurement': None,
})
@@ -184,7 +182,6 @@
# name: test_sensors[sensor.erics273_status-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=rwljod2fPqLqGP3DBV9F_yK9iuxAt3_MH6tcOnQXTc8LY1LO8JeulzCEFHaqqItKdg9oJ84qjO.VNwvUWuq_iR5iTyx1gQsqHSvWLbqIrRI-&background=0xababab&format=png',
'friendly_name': 'erics273 Status',
}),
'context': <ANY>,
@@ -209,7 +206,7 @@
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.gsr_ae_account_tier',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -220,12 +217,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'GSR Ae Account Tier',
'original_name': 'Account tier',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxSensor.ACCOUNT_TIER: 'account_tier'>,
'unique_id': '271958441785640_account_tier',
'unit_of_measurement': None,
})
@@ -233,8 +230,7 @@
# name: test_sensors[sensor.gsr_ae_account_tier-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=wHwbXKif8cus8csoZ03RW_ES.ojiJijNBGRVUbTnZKsoCCCkjlsEJrrMqDkYqs3M0aLOK2kxE9mbLm9M2.R0stAQYoDsGCDJxqDzG9WF3oa4rOCjEK7DbZXdBmBWnMrfErA3M_Q4y_mUTEQLqSAEeYFGlGeCXYsccnQMvEecxRg-&format=png',
'friendly_name': 'GSR Ae Account Tier',
'friendly_name': 'GSR Ae Account tier',
}),
'context': <ANY>,
'entity_id': 'sensor.gsr_ae_account_tier',
@@ -244,7 +240,7 @@
'state': 'Gold',
})
# ---
# name: test_sensors[sensor.gsr_ae_gamer_score-entry]
# name: test_sensors[sensor.gsr_ae_gamerscore-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -257,8 +253,8 @@
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.gsr_ae_gamer_score',
'has_entity_name': False,
'entity_id': 'sensor.gsr_ae_gamerscore',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -269,24 +265,24 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'GSR Ae Gamer Score',
'original_name': 'Gamerscore',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxSensor.GAMER_SCORE: 'gamer_score'>,
'unique_id': '271958441785640_gamer_score',
'unit_of_measurement': None,
'unit_of_measurement': 'points',
})
# ---
# name: test_sensors[sensor.gsr_ae_gamer_score-state]
# name: test_sensors[sensor.gsr_ae_gamerscore-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=wHwbXKif8cus8csoZ03RW_ES.ojiJijNBGRVUbTnZKsoCCCkjlsEJrrMqDkYqs3M0aLOK2kxE9mbLm9M2.R0stAQYoDsGCDJxqDzG9WF3oa4rOCjEK7DbZXdBmBWnMrfErA3M_Q4y_mUTEQLqSAEeYFGlGeCXYsccnQMvEecxRg-&format=png',
'friendly_name': 'GSR Ae Gamer Score',
'friendly_name': 'GSR Ae Gamerscore',
'unit_of_measurement': 'points',
}),
'context': <ANY>,
'entity_id': 'sensor.gsr_ae_gamer_score',
'entity_id': 'sensor.gsr_ae_gamerscore',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -307,7 +303,7 @@
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.gsr_ae_gold_tenure',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -318,12 +314,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'GSR Ae Gold Tenure',
'original_name': 'Gold tenure',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxSensor.GOLD_TENURE: 'gold_tenure'>,
'unique_id': '271958441785640_gold_tenure',
'unit_of_measurement': None,
})
@@ -331,8 +327,7 @@
# name: test_sensors[sensor.gsr_ae_gold_tenure-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=wHwbXKif8cus8csoZ03RW_ES.ojiJijNBGRVUbTnZKsoCCCkjlsEJrrMqDkYqs3M0aLOK2kxE9mbLm9M2.R0stAQYoDsGCDJxqDzG9WF3oa4rOCjEK7DbZXdBmBWnMrfErA3M_Q4y_mUTEQLqSAEeYFGlGeCXYsccnQMvEecxRg-&format=png',
'friendly_name': 'GSR Ae Gold Tenure',
'friendly_name': 'GSR Ae Gold tenure',
}),
'context': <ANY>,
'entity_id': 'sensor.gsr_ae_gold_tenure',
@@ -356,7 +351,7 @@
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.gsr_ae_status',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -367,12 +362,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'GSR Ae Status',
'original_name': 'Status',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxSensor.STATUS: 'status'>,
'unique_id': '271958441785640_status',
'unit_of_measurement': None,
})
@@ -380,7 +375,6 @@
# name: test_sensors[sensor.gsr_ae_status-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=wHwbXKif8cus8csoZ03RW_ES.ojiJijNBGRVUbTnZKsoCCCkjlsEJrrMqDkYqs3M0aLOK2kxE9mbLm9M2.R0stAQYoDsGCDJxqDzG9WF3oa4rOCjEK7DbZXdBmBWnMrfErA3M_Q4y_mUTEQLqSAEeYFGlGeCXYsccnQMvEecxRg-&format=png',
'friendly_name': 'GSR Ae Status',
}),
'context': <ANY>,
@@ -405,7 +399,7 @@
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.ikken_hissatsuu_account_tier',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -416,12 +410,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Ikken Hissatsuu Account Tier',
'original_name': 'Account tier',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxSensor.ACCOUNT_TIER: 'account_tier'>,
'unique_id': '2533274838782903_account_tier',
'unit_of_measurement': None,
})
@@ -429,8 +423,7 @@
# name: test_sensors[sensor.ikken_hissatsuu_account_tier-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=7OTVnZUMVj4OV2zUUGecWvn3U00nQQLfK7_kwpANogj9vJpb.t4ZQMMLIWOuBZBBZs5MjD7okwh5Zwnit1SAtO3OAsFXxJc1ALIbaVoRo7gsiun9FdcaTpzkM60nqzT8ip1659eQpB1SLyupscP.ec_wAGvXwkhCcTKCNHQMrxg-&format=png',
'friendly_name': 'Ikken Hissatsuu Account Tier',
'friendly_name': 'Ikken Hissatsuu Account tier',
}),
'context': <ANY>,
'entity_id': 'sensor.ikken_hissatsuu_account_tier',
@@ -440,7 +433,7 @@
'state': 'Gold',
})
# ---
# name: test_sensors[sensor.ikken_hissatsuu_gamer_score-entry]
# name: test_sensors[sensor.ikken_hissatsuu_gamerscore-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -453,8 +446,8 @@
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.ikken_hissatsuu_gamer_score',
'has_entity_name': False,
'entity_id': 'sensor.ikken_hissatsuu_gamerscore',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -465,24 +458,24 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Ikken Hissatsuu Gamer Score',
'original_name': 'Gamerscore',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxSensor.GAMER_SCORE: 'gamer_score'>,
'unique_id': '2533274838782903_gamer_score',
'unit_of_measurement': None,
'unit_of_measurement': 'points',
})
# ---
# name: test_sensors[sensor.ikken_hissatsuu_gamer_score-state]
# name: test_sensors[sensor.ikken_hissatsuu_gamerscore-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=7OTVnZUMVj4OV2zUUGecWvn3U00nQQLfK7_kwpANogj9vJpb.t4ZQMMLIWOuBZBBZs5MjD7okwh5Zwnit1SAtO3OAsFXxJc1ALIbaVoRo7gsiun9FdcaTpzkM60nqzT8ip1659eQpB1SLyupscP.ec_wAGvXwkhCcTKCNHQMrxg-&format=png',
'friendly_name': 'Ikken Hissatsuu Gamer Score',
'friendly_name': 'Ikken Hissatsuu Gamerscore',
'unit_of_measurement': 'points',
}),
'context': <ANY>,
'entity_id': 'sensor.ikken_hissatsuu_gamer_score',
'entity_id': 'sensor.ikken_hissatsuu_gamerscore',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -503,7 +496,7 @@
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.ikken_hissatsuu_gold_tenure',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -514,12 +507,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Ikken Hissatsuu Gold Tenure',
'original_name': 'Gold tenure',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxSensor.GOLD_TENURE: 'gold_tenure'>,
'unique_id': '2533274838782903_gold_tenure',
'unit_of_measurement': None,
})
@@ -527,8 +520,7 @@
# name: test_sensors[sensor.ikken_hissatsuu_gold_tenure-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=7OTVnZUMVj4OV2zUUGecWvn3U00nQQLfK7_kwpANogj9vJpb.t4ZQMMLIWOuBZBBZs5MjD7okwh5Zwnit1SAtO3OAsFXxJc1ALIbaVoRo7gsiun9FdcaTpzkM60nqzT8ip1659eQpB1SLyupscP.ec_wAGvXwkhCcTKCNHQMrxg-&format=png',
'friendly_name': 'Ikken Hissatsuu Gold Tenure',
'friendly_name': 'Ikken Hissatsuu Gold tenure',
}),
'context': <ANY>,
'entity_id': 'sensor.ikken_hissatsuu_gold_tenure',
@@ -552,7 +544,7 @@
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.ikken_hissatsuu_status',
'has_entity_name': False,
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
@@ -563,12 +555,12 @@
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Ikken Hissatsuu Status',
'original_name': 'Status',
'platform': 'xbox',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': <XboxSensor.STATUS: 'status'>,
'unique_id': '2533274838782903_status',
'unit_of_measurement': None,
})
@@ -576,7 +568,6 @@
# name: test_sensors[sensor.ikken_hissatsuu_status-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'entity_picture': 'https://images-eds-ssl.xboxlive.com/image?url=7OTVnZUMVj4OV2zUUGecWvn3U00nQQLfK7_kwpANogj9vJpb.t4ZQMMLIWOuBZBBZs5MjD7okwh5Zwnit1SAtO3OAsFXxJc1ALIbaVoRo7gsiun9FdcaTpzkM60nqzT8ip1659eQpB1SLyupscP.ec_wAGvXwkhCcTKCNHQMrxg-&format=png',
'friendly_name': 'Ikken Hissatsuu Status',
}),
'context': <ANY>,

View File

@@ -0,0 +1,66 @@
"""Tests for the Xbox integration."""
from unittest.mock import AsyncMock, patch
from httpx import ConnectTimeout, HTTPStatusError, ProtocolError
import pytest
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry


@pytest.mark.usefixtures("xbox_live_client")
async def test_entry_setup_unload(
hass: HomeAssistant, config_entry: MockConfigEntry
) -> None:
"""Test integration setup and unload."""
config_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.LOADED
assert await hass.config_entries.async_unload(config_entry.entry_id)
assert config_entry.state is ConfigEntryState.NOT_LOADED


@pytest.mark.parametrize(
"exception",
[ConnectTimeout, HTTPStatusError, ProtocolError],
)
async def test_config_entry_not_ready(
hass: HomeAssistant,
config_entry: MockConfigEntry,
xbox_live_client: AsyncMock,
exception: Exception,
) -> None:
"""Test config entry not ready."""
xbox_live_client.smartglass.get_console_list.side_effect = exception
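# Simulate the Xbox Live API failing while fetching the console list during setup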
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.SETUP_RETRY


@pytest.mark.usefixtures("xbox_live_client")
async def test_config_implementation_not_available(
hass: HomeAssistant,
config_entry: MockConfigEntry,
) -> None:
"""Test implementation not available."""
config_entry.add_to_hass(hass)
with patch(
"homeassistant.components.xbox.coordinator.config_entry_oauth2_flow.async_get_config_entry_implementation",
side_effect=ValueError("Implementation not available"),
):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert config_entry.state is ConfigEntryState.SETUP_RETRY

View File

@@ -0,0 +1,280 @@
"""Tests for zeroconf repair issues."""
from unittest.mock import patch
import pytest
from zeroconf import ServiceStateChange
from zeroconf.asyncio import AsyncServiceInfo
from homeassistant.components.homeassistant import DOMAIN as HOMEASSISTANT_DOMAIN
from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN
from homeassistant.components.zeroconf import DOMAIN, discovery, repairs
from homeassistant.components.zeroconf.discovery import ZEROCONF_TYPE
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import instance_id, issue_registry as ir
from homeassistant.setup import async_setup_component
from .test_init import service_update_mock
from tests.components.repairs import (
async_process_repairs_platforms,
process_repair_fix_flow,
start_repair_fix_flow,
)
from tests.typing import ClientSessionGenerator


def service_state_change_mock(
zeroconf,
services,
handlers,
*,
state_change: ServiceStateChange = ServiceStateChange.Removed,
) -> None:
"""Call service update handler."""
for service in services:
handlers[0](zeroconf, service, f"_name.{service}", state_change)


def _get_hass_service_info_mock(
service_type: str,
name: str,
*,
instance_id="abc123",
) -> AsyncServiceInfo:
"""Return service info for Home Assistant instance."""
return AsyncServiceInfo(
ZEROCONF_TYPE,
name,
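# Raw IPv4 address bytes for 10.0.0.1 (0x0A 0x00 0x00 0x01)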
addresses=[b"\n\x00\x00\x01"],
port=8123,
weight=0,
priority=0,
server="other-host.local.",
properties={
"base_url": "http://10.0.0.1:8123",
"external_url": None,
"internal_url": "http://10.0.0.1:8123",
"location_name": "Home",
"requires_api_password": "True",
"uuid": instance_id,
"version": "2025.9.0.dev0",
},
)


@pytest.mark.usefixtures("mock_async_zeroconf")
async def test_instance_id_conflict_creates_repair_issue_remove(
hass: HomeAssistant, issue_registry: ir.IssueRegistry
) -> None:
"""Test that a repair issue is created on instance ID conflict and gets removed when instance disappears."""
with (
patch("homeassistant.helpers.instance_id.async_get", return_value="abc123"),
patch.object(
discovery, "AsyncServiceBrowser", side_effect=service_update_mock
) as mock_browser,
patch.object(hass.config_entries.flow, "async_init"),
patch(
"homeassistant.components.zeroconf.discovery.AsyncServiceInfo",
side_effect=_get_hass_service_info_mock,
),
):
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
issue = issue_registry.async_get_issue(
domain="zeroconf", issue_id="duplicate_instance_id"
)
assert issue
assert issue.severity == ir.IssueSeverity.ERROR
assert issue.translation_key == "duplicate_instance_id"
assert issue.translation_placeholders == {
"other_host_url": "other-host.local",
"other_ip": "10.0.0.1",
"instance_id": "abc123",
}
# Now test that the issue is removed when the service goes away
service_state_change_mock(
mock_browser.call_args[0][0],
[ZEROCONF_TYPE],
mock_browser.call_args[1]["handlers"],
)
assert (
issue_registry.async_get_issue(
domain="zeroconf", issue_id="duplicate_instance_id"
)
is None
)


@pytest.mark.usefixtures("mock_async_zeroconf")
async def test_instance_id_conflict_creates_repair_issue_changing_id(
hass: HomeAssistant, issue_registry: ir.IssueRegistry
) -> None:
"""Test that a repair issue is created on instance ID conflict and gets removed when instance ID changes."""
with (
patch("homeassistant.helpers.instance_id.async_get", return_value="abc123"),
patch.object(
discovery, "AsyncServiceBrowser", side_effect=service_update_mock
) as mock_browser,
patch.object(hass.config_entries.flow, "async_init"),
patch(
"homeassistant.components.zeroconf.discovery.AsyncServiceInfo",
side_effect=_get_hass_service_info_mock,
),
):
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
issue = issue_registry.async_get_issue(
domain="zeroconf", issue_id="duplicate_instance_id"
)
assert issue
assert issue.severity == ir.IssueSeverity.ERROR
assert issue.translation_key == "duplicate_instance_id"
assert issue.translation_placeholders == {
"other_host_url": "other-host.local",
"other_ip": "10.0.0.1",
"instance_id": "abc123",
}
with (
patch(
"homeassistant.components.zeroconf.discovery.AsyncServiceInfo",
side_effect=lambda service_type, name: _get_hass_service_info_mock(
service_type, name, instance_id="different-id"
),
),
):
# Now test that the issue is removed when the reported instance ID changes
service_state_change_mock(
mock_browser.call_args[0][0],
[ZEROCONF_TYPE],
mock_browser.call_args[1]["handlers"],
state_change=ServiceStateChange.Updated,
)
assert (
issue_registry.async_get_issue(
domain="zeroconf", issue_id="duplicate_instance_id"
)
is None
)


@pytest.mark.usefixtures("mock_async_zeroconf")
async def test_instance_id_no_repair_issue_own_ip(
hass: HomeAssistant, issue_registry: ir.IssueRegistry
) -> None:
"""Test that no repair issue is created when the other instance ID matches our IP."""
with (
patch("homeassistant.helpers.instance_id.async_get", return_value="abc123"),
patch.object(discovery, "AsyncServiceBrowser", side_effect=service_update_mock),
patch.object(hass.config_entries.flow, "async_init"),
patch(
"homeassistant.components.zeroconf.discovery.AsyncServiceInfo",
side_effect=_get_hass_service_info_mock,
),
patch(
"homeassistant.components.network.async_get_announce_addresses",
return_value=["10.0.0.1", "10.0.0.2"],
),
):
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert (
issue_registry.async_get_issue(
domain="zeroconf", issue_id="duplicate_instance_id"
)
is None
)


@pytest.mark.usefixtures("mock_async_zeroconf")
async def test_instance_id_no_conflict_no_repair_issue(
hass: HomeAssistant, issue_registry: ir.IssueRegistry
) -> None:
"""Test that a repair issue is not created when no instance ID conflict exists."""
with (
patch("homeassistant.helpers.instance_id.async_get", return_value="xyz123"),
patch.object(discovery, "AsyncServiceBrowser", side_effect=service_update_mock),
patch.object(hass.config_entries.flow, "async_init"),
patch(
"homeassistant.components.zeroconf.discovery.AsyncServiceInfo",
side_effect=_get_hass_service_info_mock,
),
patch("homeassistant.helpers.issue_registry.async_create_issue"),
):
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert (
issue_registry.async_get_issue(
domain="zeroconf", issue_id="duplicate_instance_id"
)
is None
)


async def test_create_fix_flow_raises_on_unknown_issue_id(hass: HomeAssistant) -> None:
"""Test create_fix_flow raises on unknown issue_id."""
with pytest.raises(ValueError):
await repairs.async_create_fix_flow(hass, "no_such_issue", None)


@pytest.mark.usefixtures("mock_async_zeroconf")
async def test_duplicate_repair_issue_repair_flow(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
issue_registry: ir.IssueRegistry,
) -> None:
"""Test desired flow of the fix flow for duplicate instance ID."""
assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}})
assert await async_setup_component(hass, HOMEASSISTANT_DOMAIN, {})
await async_process_repairs_platforms(hass)
with (
patch("homeassistant.helpers.instance_id.async_get", return_value="abc123"),
patch.object(discovery, "AsyncServiceBrowser", side_effect=service_update_mock),
patch.object(hass.config_entries.flow, "async_init"),
patch(
"homeassistant.components.zeroconf.discovery.AsyncServiceInfo",
side_effect=_get_hass_service_info_mock,
),
patch.object(
instance_id, "async_recreate", return_value="new-uuid"
) as mock_recreate,
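# Config check and shutdown are mocked so any restart triggered by the fix flow is a no-op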
patch("homeassistant.config.async_check_ha_config_file", return_value=None),
patch("homeassistant.core.HomeAssistant.async_stop", return_value=None),
):
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
issue = issue_registry.async_get_issue(
domain="zeroconf", issue_id="duplicate_instance_id"
)
assert issue is not None
client = await hass_client()
result = await start_repair_fix_flow(client, DOMAIN, issue.issue_id)
flow_id = result["flow_id"]
assert result["type"] == FlowResultType.FORM
assert result["step_id"] == "confirm_recreate"
result = await process_repair_fix_flow(client, flow_id, json={})
assert result["type"] == "create_entry"
await hass.async_block_till_done()
assert mock_recreate.called

View File

@@ -83,3 +83,15 @@ async def test_get_id_migrate_fail(
"Could not read hass instance ID from 'core.uuid' or '.uuid', a "
"new instance ID will be generated" in caplog.text
)


async def test_async_recreate(
hass: HomeAssistant, hass_storage: dict[str, Any]
) -> None:
"""Test recreating instance ID."""
uuid1 = await instance_id.async_get(hass)
uuid2 = await instance_id.async_recreate(hass)
assert uuid1 != uuid2
# Assert it's stored
assert hass_storage["core.uuid"]["data"]["uuid"] == uuid2

View File

@@ -1,6 +1,8 @@
"""Test check_config script."""
import json
import logging
import os
from unittest.mock import patch
import pytest
@@ -180,3 +182,463 @@ def test_bootstrap_error() -> None:
assert res["secrets"] == {}
assert res["warn"] == {}
assert res["yaml_files"] == {}


@pytest.mark.parametrize("hass_config_yaml", [BASE_CONFIG])
@pytest.mark.usefixtures("mock_is_file", "mock_hass_config_yaml")
def test_run_json_flag_only() -> None:
"""Test that --json flag works independently."""
with (
patch("builtins.print") as mock_print,
patch.object(check_config, "check") as mock_check,
):
mock_check.return_value = {
"except": {"domain1": ["error1", "error2"]},
"warn": {"domain2": ["warning1"]},
"components": {"homeassistant": {}, "light": {}, "http": {}},
"secrets": {},
"secret_cache": {},
"yaml_files": {},
}
exit_code = check_config.run(["--json"])
# Should exit with code 1 (1 domain with errors)
assert exit_code == 1
# Should have printed JSON
assert mock_print.call_count == 1
json_output = mock_print.call_args[0][0]
# Verify it's valid JSON
parsed_json = json.loads(json_output)
# Verify JSON structure
assert "config_dir" in parsed_json
assert "total_errors" in parsed_json
assert "total_warnings" in parsed_json
assert "errors" in parsed_json
assert "warnings" in parsed_json
assert "components" in parsed_json
# Verify JSON content
assert parsed_json["total_errors"] == 2 # 2 error messages
assert parsed_json["total_warnings"] == 1 # 1 warning message
assert parsed_json["errors"] == {"domain1": ["error1", "error2"]}
assert parsed_json["warnings"] == {"domain2": ["warning1"]}
assert set(parsed_json["components"]) == {"homeassistant", "light", "http"}


@pytest.mark.parametrize("hass_config_yaml", [BASE_CONFIG])
@pytest.mark.usefixtures("mock_is_file", "mock_hass_config_yaml")
def test_run_fail_on_warnings_flag_only() -> None:
"""Test that --fail-on-warnings flag works independently."""
# Test with warnings only
with patch.object(check_config, "check") as mock_check:
mock_check.return_value = {
"except": {},
"warn": {"light": ["warning message"]},
"components": {"homeassistant": {}},
"secrets": {},
"secret_cache": {},
"yaml_files": {},
}
exit_code = check_config.run(["--fail-on-warnings"])
assert exit_code == 1 # Should exit non-zero due to warnings
# Test with no warnings or errors
with patch.object(check_config, "check") as mock_check:
mock_check.return_value = {
"except": {},
"warn": {},
"components": {"homeassistant": {}},
"secrets": {},
"secret_cache": {},
"yaml_files": {},
}
exit_code = check_config.run(["--fail-on-warnings"])
assert exit_code == 0 # Should exit zero when no warnings/errors
# Test with both errors and warnings
with patch.object(check_config, "check") as mock_check:
mock_check.return_value = {
"except": {"domain1": ["error"]},
"warn": {"domain2": ["warning"]},
"components": {"homeassistant": {}},
"secrets": {},
"secret_cache": {},
"yaml_files": {},
}
exit_code = check_config.run(["--fail-on-warnings"])
assert exit_code == 1 # max(1, 1) = 1


@pytest.mark.parametrize("hass_config_yaml", [BASE_CONFIG])
@pytest.mark.usefixtures("mock_is_file", "mock_hass_config_yaml")
def test_run_json_output_structure() -> None:
"""Test JSON output contains all required fields with correct types."""
with (
patch("builtins.print") as mock_print,
patch.object(check_config, "check") as mock_check,
):
mock_check.return_value = {
"except": {"domain1": ["error1", {"config": "bad"}]},
"warn": {"domain2": ["warning1", {"config": "deprecated"}]},
"components": {"homeassistant": {}, "light": {}, "automation": {}},
"secrets": {},
"secret_cache": {},
"yaml_files": {},
}
exit_code = check_config.run(["--json", "--config", "/test/path"])
json_output = mock_print.call_args[0][0]
parsed_json = json.loads(json_output)
# Should exit with code 1 due to errors
assert exit_code == 1
# Test all required fields are present
required_fields = [
"config_dir",
"total_errors",
"total_warnings",
"errors",
"warnings",
"components",
]
for field in required_fields:
assert field in parsed_json, f"Missing required field: {field}"
# Test field types and values
assert isinstance(parsed_json["config_dir"], str)
assert isinstance(parsed_json["total_errors"], int)
assert isinstance(parsed_json["total_warnings"], int)
assert isinstance(parsed_json["errors"], dict)
assert isinstance(parsed_json["warnings"], dict)
assert isinstance(parsed_json["components"], list)
# Test counts are correct
assert parsed_json["total_errors"] == 2 # 2 items in domain1 list
assert parsed_json["total_warnings"] == 2 # 2 items in domain2 list
# Test components is a list of strings
assert all(isinstance(comp, str) for comp in parsed_json["components"])
assert set(parsed_json["components"]) == {
"homeassistant",
"light",
"automation",
}


def test_run_exit_code_logic() -> None:
"""Test exit code logic for all flag combinations."""
test_cases = [
# (errors, warnings, flags, expected_exit_code)
({}, {}, [], 0), # No errors, no warnings, no flags
({}, {}, ["--json"], 0), # No errors, no warnings, json only
(
{},
{},
["--fail-on-warnings"],
0,
), # No errors, no warnings, fail-on-warnings only
(
{},
{},
["--json", "--fail-on-warnings"],
0,
), # No errors, no warnings, both flags
(
{},
{"domain": ["warning"]},
[],
0,
), # Warnings only, no flags (backwards compatible)
({}, {"domain": ["warning"]}, ["--json"], 0), # Warnings only, json only
(
{},
{"domain": ["warning"]},
["--fail-on-warnings"],
1,
), # Warnings only, fail-on-warnings
(
{},
{"domain": ["warning"]},
["--json", "--fail-on-warnings"],
1,
), # Warnings only, both flags
({"domain": ["error"]}, {}, [], 1), # Errors only, no flags
({"domain": ["error"]}, {}, ["--json"], 1), # Errors only, json only
(
{"domain": ["error"]},
{},
["--fail-on-warnings"],
1,
), # Errors only, fail-on-warnings
(
{"domain": ["error"]},
{},
["--json", "--fail-on-warnings"],
1,
), # Errors only, both flags
({"domain": ["error"]}, {"domain2": ["warning"]}, [], 1), # Both, no flags
(
{"domain": ["error"]},
{"domain2": ["warning"]},
["--json"],
1,
), # Both, json only
(
{"domain": ["error"]},
{"domain2": ["warning"]},
["--fail-on-warnings"],
1,
), # Both, fail-on-warnings
(
{"domain": ["error"]},
{"domain2": ["warning"]},
["--json", "--fail-on-warnings"],
1,
), # Both, both flags
({"d1": ["e1"], "d2": ["e2"]}, {}, [], 1), # Multiple error domains, no flags
(
{"d1": ["e1"], "d2": ["e2"]},
{"d3": ["w1"]},
["--fail-on-warnings"],
1,
), # Multiple errors + warnings
]
for errors, warnings, flags, expected_exit in test_cases:
with patch("builtins.print"), patch.object(check_config, "check") as mock_check:
mock_check.return_value = {
"except": errors,
"warn": warnings,
"components": {"homeassistant": {}},
"secrets": {},
"secret_cache": {},
"yaml_files": {},
}
exit_code = check_config.run(flags)
assert exit_code == expected_exit, (
f"Failed for errors={errors}, warnings={warnings}, flags={flags}. "
f"Expected {expected_exit}, got {exit_code}"
)


@pytest.mark.parametrize("hass_config_yaml", [BASE_CONFIG])
@pytest.mark.usefixtures("mock_is_file", "mock_hass_config_yaml")
def test_run_human_readable_still_works() -> None:
"""Test that human-readable output still works without JSON flag."""
with (
patch("builtins.print") as mock_print,
patch.object(check_config, "check") as mock_check,
):
mock_check.return_value = {
"except": {},
"warn": {},
"components": {"homeassistant": {}},
"secrets": {},
"secret_cache": {},
"yaml_files": {},
}
check_config.run([])
# Should print the "Testing configuration at" message
printed_outputs = [
call[0][0] if call[0] else "" for call in mock_print.call_args_list
]
testing_message_found = any(
"Testing configuration at" in output for output in printed_outputs
)
assert testing_message_found, (
"Human-readable 'Testing configuration at' message not found"
)


def test_run_with_config_path() -> None:
"""Test that config path is correctly included in JSON output."""
with (
patch("builtins.print") as mock_print,
patch.object(check_config, "check") as mock_check,
):
mock_check.return_value = {
"except": {},
"warn": {},
"components": {"homeassistant": {}},
"secrets": {},
"secret_cache": {},
"yaml_files": {},
}
test_config_path = "/custom/config/path"
check_config.run(["--json", "--config", test_config_path])
json_output = mock_print.call_args[0][0]
parsed_json = json.loads(json_output)
# The config_dir should include the full path
expected_path = os.path.join(os.getcwd(), test_config_path)
assert parsed_json["config_dir"] == expected_path


# Flag Interaction Tests
def test_unknown_arguments_with_json() -> None:
"""Test that unknown arguments are handled properly with JSON flag."""
with (
patch("builtins.print") as mock_print,
patch.object(check_config, "check") as mock_check,
):
mock_check.return_value = {
"except": {},
"warn": {},
"components": {"homeassistant": {}},
"secrets": {},
"secret_cache": {},
"yaml_files": {},
}
check_config.run(["--json", "--unknown-flag", "value"])
# Should still print unknown argument warning AND JSON
assert mock_print.call_count == 2
# First call should be the unknown argument warning
unknown_warning = mock_print.call_args_list[0][0][0]
assert "Unknown arguments" in unknown_warning
assert "unknown-flag" in unknown_warning
# Second call should be valid JSON
json_output = mock_print.call_args_list[1][0][0]
parsed_json = json.loads(json_output)
assert "config_dir" in parsed_json


@pytest.mark.parametrize("hass_config_yaml", [BASE_CONFIG])
@pytest.mark.usefixtures("mock_is_file", "mock_hass_config_yaml")
def test_info_flag_with_json() -> None:
"""Test how --info flag interacts with --json."""
with (
patch("builtins.print") as mock_print,
patch.object(check_config, "check") as mock_check,
):
mock_check.return_value = {
"except": {},
"warn": {},
"components": {"homeassistant": {}, "light": {"platform": "demo"}},
"secrets": {},
"secret_cache": {},
"yaml_files": {},
}
# Test --json with --info - JSON should take precedence
exit_code = check_config.run(["--json", "--info", "light"])
assert exit_code == 0
assert mock_print.call_count == 1
# Should be JSON output, not info output
json_output = json.loads(mock_print.call_args[0][0])
assert "config_dir" in json_output
assert "components" in json_output
assert "light" in json_output["components"]


def test_config_flag_variations() -> None:
"""Test different ways to specify config directory."""
test_cases = [
(["-c", "/test/path"], "/test/path"),
(["--config", "/test/path"], "/test/path"),
(["--json", "-c", "relative/path"], "relative/path"),
(["--config", ".", "--json"], "."),
]
for flags, expected_config_part in test_cases:
with (
patch("builtins.print") as mock_print,
patch.object(check_config, "check") as mock_check,
):
mock_check.return_value = {
"except": {},
"warn": {},
"components": {"homeassistant": {}},
"secrets": {},
"secret_cache": {},
"yaml_files": {},
}
check_config.run(flags)
if "--json" in flags:
json_output = json.loads(mock_print.call_args[0][0])
expected_full_path = os.path.join(os.getcwd(), expected_config_part)
assert json_output["config_dir"] == expected_full_path


def test_multiple_config_flags() -> None:
"""Test behavior with multiple config directory specifications."""
with (
patch("builtins.print") as mock_print,
patch.object(check_config, "check") as mock_check,
):
mock_check.return_value = {
"except": {},
"warn": {},
"components": {"homeassistant": {}},
"secrets": {},
"secret_cache": {},
"yaml_files": {},
}
# Last config flag should win
check_config.run(
["--json", "--config", "/first/path", "--config", "/second/path"]
)
json_output = json.loads(mock_print.call_args[0][0])
expected_path = os.path.join(os.getcwd(), "/second/path")
assert json_output["config_dir"] == expected_path


def test_fail_on_warnings_with_json_combinations() -> None:
"""Test --fail-on-warnings with --json in various scenarios."""
test_scenarios = [
# (errors, warnings, expected_exit_code)
({}, {}, 0),
({"domain1": ["error"]}, {}, 1),
({}, {"domain1": ["warning"]}, 1), # With --fail-on-warnings
({"d1": ["e1"]}, {"d2": ["w1"]}, 1), # Errors still take precedence
({"d1": ["e1"], "d2": ["e2"]}, {"d3": ["w1"]}, 1), # Multiple errors > warnings
]
for errors, warnings, expected_exit in test_scenarios:
with (
patch("builtins.print") as mock_print,
patch.object(check_config, "check") as mock_check,
):
mock_check.return_value = {
"except": errors,
"warn": warnings,
"components": {"homeassistant": {}},
"secrets": {},
"secret_cache": {},
"yaml_files": {},
}
exit_code = check_config.run(["--json", "--fail-on-warnings"])
assert exit_code == expected_exit
# Should still output valid JSON
json_output = json.loads(mock_print.call_args[0][0])
assert json_output["total_errors"] == sum(len(e) for e in errors.values())
assert json_output["total_warnings"] == sum(
len(w) for w in warnings.values()
)