mirror of https://github.com/home-assistant/core.git (synced 2025-07-30 08:47:09 +00:00)

commit 2c21f0ad18: Merge pull request #59129 from home-assistant/rc

@@ -20,13 +20,12 @@ async def validate_input(data):
     lat = data.get(CONF_LATITUDE)
     lon = data.get(CONF_LONGITUDE)
     station = data.get(CONF_STATION)
-    lang = data.get(CONF_LANGUAGE)
+    lang = data.get(CONF_LANGUAGE).lower()
 
-    weather_data = ECWeather(
-        station_id=station,
-        coordinates=(lat, lon),
-        language=lang.lower(),
-    )
+    if station:
+        weather_data = ECWeather(station_id=station, language=lang)
+    else:
+        weather_data = ECWeather(coordinates=(lat, lon), language=lang)
     await weather_data.update()
 
     if lat is None or lon is None:

@@ -182,7 +182,7 @@ class FluxLedUpdateCoordinator(DataUpdateCoordinator):
             hass,
             _LOGGER,
             name=self.device.ipaddr,
-            update_interval=timedelta(seconds=5),
+            update_interval=timedelta(seconds=10),
             # We don't want an immediate refresh since the device
             # takes a moment to reflect the state change
             request_refresh_debouncer=Debouncer(
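
Note: the hunk above relaxes the polling interval from 5 to 10 seconds while keeping the debounced manual refresh. As background, a minimal sketch of this coordinator pattern is below; the name, fetch coroutine, and cooldown value are illustrative assumptions, not the integration's actual code.

    from datetime import timedelta

    from homeassistant.helpers.debounce import Debouncer
    from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

    async def _async_fetch_state():
        """Hypothetical fetch coroutine; the real one talks to the device."""

    def make_coordinator(hass, logger):
        # Poll every 10 seconds; debounce explicit refresh requests so a
        # just-sent state change has time to settle before being re-read.
        return DataUpdateCoordinator(
            hass,
            logger,
            name="flux_led",
            update_method=_async_fetch_state,
            update_interval=timedelta(seconds=10),
            request_refresh_debouncer=Debouncer(
                hass, logger, cooldown=1.0, immediate=False  # cooldown assumed
            ),
        )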

@@ -3,7 +3,7 @@
   "name": "Flux LED/MagicHome",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/flux_led",
-  "requirements": ["flux_led==0.24.13"],
+  "requirements": ["flux_led==0.24.14"],
   "quality_scale": "platinum",
   "codeowners": ["@icemanch"],
   "iot_class": "local_push",

@@ -39,8 +39,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
 
     # strip out the stale options CONF_RESOURCES
     if CONF_RESOURCES in entry.options:
+        new_data = {**entry.data, CONF_RESOURCES: entry.options[CONF_RESOURCES]}
         new_options = {k: v for k, v in entry.options.items() if k != CONF_RESOURCES}
-        hass.config_entries.async_update_entry(entry, options=new_options)
+        hass.config_entries.async_update_entry(
+            entry, data=new_data, options=new_options
+        )
 
     config = entry.data
     host = config[CONF_HOST]
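
Note: the migration above moves CONF_RESOURCES from entry.options into entry.data in a single async_update_entry call. A standalone illustration of the two dict operations involved, with made-up keys:

    entry_data = {"host": "192.168.1.10"}
    entry_options = {"resources": ["battery.charge"], "scan_interval": 60}

    # ** unpacking copies the mapping; the explicit key extends the copy.
    new_data = {**entry_data, "resources": entry_options["resources"]}
    # The comprehension drops the migrated key from the options.
    new_options = {k: v for k, v in entry_options.items() if k != "resources"}

    assert new_data == {"host": "192.168.1.10", "resources": ["battery.charge"]}
    assert new_options == {"scan_interval": 60}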

@@ -189,7 +189,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
 
         try:
             user_input[CONF_API_KEY] = await octoprint.request_app_key(
-                "Home Assistant", user_input[CONF_USERNAME], 30
+                "Home Assistant", user_input[CONF_USERNAME], 300
             )
         finally:
             # Continue the flow after show progress when the task is done.

@@ -3,7 +3,7 @@
   "name": "Spain electricity hourly pricing (PVPC)",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/pvpc_hourly_pricing",
-  "requirements": ["aiopvpc==2.2.0"],
+  "requirements": ["aiopvpc==2.2.1"],
   "codeowners": ["@azogue"],
   "quality_scale": "platinum",
   "iot_class": "cloud_polling"

@@ -793,7 +793,7 @@ class Recorder(threading.Thread):
         if statistics.add_external_statistics(self, metadata, stats):
             return
         # Schedule a new statistics task if this one didn't finish
-        self.queue.put(StatisticsTask(metadata, stats))
+        self.queue.put(ExternalStatisticsTask(metadata, stats))
 
     def _process_one_event(self, event):
         """Process one event."""
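
Note: the fix requeues an ExternalStatisticsTask instead of a StatisticsTask, so deferred external statistics retry the same kind of work instead of triggering an internal statistics run. A generic, illustrative sketch of the requeue pattern (the dataclass fields are assumptions):

    import queue
    from dataclasses import dataclass

    @dataclass
    class ExternalStatisticsTask:
        metadata: dict
        stats: list

    tasks: queue.SimpleQueue = queue.SimpleQueue()

    def process(task: ExternalStatisticsTask, committed: bool) -> None:
        if committed:
            return
        # Retry later by putting back the *same* task type; requeueing a
        # different type would silently change what gets retried.
        tasks.put(ExternalStatisticsTask(task.metadata, task.stats))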

@@ -12,6 +12,7 @@ from sqlalchemy.exc import (
     SQLAlchemyError,
 )
 from sqlalchemy.schema import AddConstraint, DropConstraint
+from sqlalchemy.sql.expression import true
 
 from .models import (
     SCHEMA_VERSION,

@@ -24,7 +25,7 @@ from .models import (
     StatisticsShortTerm,
     process_timestamp,
 )
-from .statistics import get_metadata_with_session, get_start_time
+from .statistics import get_start_time
 from .util import session_scope
 
 _LOGGER = logging.getLogger(__name__)

@@ -558,21 +559,25 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901
                 session.add(StatisticsRuns(start=fake_start_time))
                 fake_start_time += timedelta(minutes=5)
 
-        # Copy last hourly statistic to the newly created 5-minute statistics table
-        sum_statistics = get_metadata_with_session(
-            instance.hass, session, statistic_type="sum"
-        )
-        for metadata_id, _ in sum_statistics.values():
+        # When querying the database, be careful to only explicitly query for columns
+        # which were present in schema version 21. If querying the table, SQLAlchemy
+        # will refer to future columns.
+        for sum_statistic in session.query(StatisticsMeta.id).filter_by(has_sum=true()):
             last_statistic = (
-                session.query(Statistics)
-                .filter_by(metadata_id=metadata_id)
+                session.query(
+                    Statistics.start,
+                    Statistics.last_reset,
+                    Statistics.state,
+                    Statistics.sum,
+                )
+                .filter_by(metadata_id=sum_statistic.id)
                 .order_by(Statistics.start.desc())
                 .first()
             )
             if last_statistic:
                 session.add(
                     StatisticsShortTerm(
-                        metadata_id=last_statistic.metadata_id,
+                        metadata_id=sum_statistic.id,
                         start=last_statistic.start,
                         last_reset=last_statistic.last_reset,
                         state=last_statistic.state,
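
Note: the new comment explains the crux: session.query(Statistics) selects every column mapped on the current model class, which may not exist yet in the older schema being migrated, while session.query(Statistics.start, ...) emits SQL for only the named columns. A self-contained SQLAlchemy sketch of the column-limited style (toy model, not the recorder schema):

    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Item(Base):
        __tablename__ = "items"
        id = Column(Integer, primary_key=True)
        state = Column(Integer)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add(Item(state=3))
        session.commit()
        # Selects only id and state; the returned Row still supports
        # attribute access, so row.state works like an ORM instance.
        row = session.query(Item.id, Item.state).first()
        assert row.state == 3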

@@ -49,7 +49,7 @@ MIN_VERSION_MARIA_DB_ROWNUM = AwesomeVersion("10.2.0", AwesomeVersionStrategy.SIMPLEVER)
 MIN_VERSION_MYSQL = AwesomeVersion("8.0.0", AwesomeVersionStrategy.SIMPLEVER)
 MIN_VERSION_MYSQL_ROWNUM = AwesomeVersion("5.8.0", AwesomeVersionStrategy.SIMPLEVER)
 MIN_VERSION_PGSQL = AwesomeVersion("12.0", AwesomeVersionStrategy.SIMPLEVER)
-MIN_VERSION_SQLITE = AwesomeVersion("3.32.1", AwesomeVersionStrategy.SIMPLEVER)
+MIN_VERSION_SQLITE = AwesomeVersion("3.31.0", AwesomeVersionStrategy.SIMPLEVER)
 MIN_VERSION_SQLITE_ROWNUM = AwesomeVersion("3.25.0", AwesomeVersionStrategy.SIMPLEVER)
 
 # This is the maximum time after the recorder ends the session
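
Note: lowering MIN_VERSION_SQLITE to 3.31.0 takes effect wherever the detected server version is compared against these constants; AwesomeVersion instances compare directly. A hypothetical sketch of such a gate:

    from awesomeversion import AwesomeVersion, AwesomeVersionStrategy

    MIN_VERSION_SQLITE = AwesomeVersion("3.31.0", AwesomeVersionStrategy.SIMPLEVER)

    def sqlite_outdated(detected: str) -> bool:
        """Hypothetical helper: flag versions below the minimum."""
        return AwesomeVersion(detected, AwesomeVersionStrategy.SIMPLEVER) < MIN_VERSION_SQLITE

    assert sqlite_outdated("3.30.0")
    assert not sqlite_outdated("3.33.0")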

@@ -295,7 +295,7 @@ def _warn_unsupported_dialect(dialect):
         "Starting with Home Assistant 2022.2 this will prevent the recorder from "
         "starting. Please migrate your database to a supported software before then",
         dialect,
-        "MariaDB ≥ 10.3, MySQL ≥ 8.0, PostgreSQL ≥ 12, SQLite ≥ 3.32.1",
+        "MariaDB ≥ 10.3, MySQL ≥ 8.0, PostgreSQL ≥ 12, SQLite ≥ 3.31.0",
     )
 
 

@@ -138,11 +138,6 @@ class SharkVacuumEntity(CoordinatorEntity, StateVacuumEntity):
         """Flag vacuum cleaner robot features that are supported."""
         return SUPPORT_SHARKIQ
 
-    @property
-    def is_docked(self) -> bool | None:
-        """Is vacuum docked."""
-        return self.sharkiq.get_property_value(Properties.DOCKED_STATUS)
-
     @property
     def error_code(self) -> int | None:
         """Return the last observed error code (or None)."""

@@ -175,7 +170,7 @@ class SharkVacuumEntity(CoordinatorEntity, StateVacuumEntity):
         In the app, these are (usually) handled by showing the robot as stopped and sending the
         user a notification.
         """
-        if self.is_docked:
+        if self.sharkiq.get_property_value(Properties.CHARGING_STATUS):
             return STATE_DOCKED
         return self.operating_mode
 

@@ -2,7 +2,7 @@
   "domain": "velbus",
   "name": "Velbus",
   "documentation": "https://www.home-assistant.io/integrations/velbus",
-  "requirements": ["velbus-aio==2021.10.7"],
+  "requirements": ["velbus-aio==2021.11.0"],
   "config_flow": true,
   "codeowners": ["@Cereal2nd", "@brefra"],
   "iot_class": "local_push"

@@ -29,6 +29,7 @@ from .const import (
     MODELS_HUMIDIFIER_MJJSQ,
     MODELS_VACUUM,
     MODELS_VACUUM_WITH_MOP,
+    MODELS_VACUUM_WITH_SEPARATE_MOP,
 )
 from .device import XiaomiCoordinatedMiioEntity
 

@@ -77,7 +78,7 @@ FAN_ZA5_BINARY_SENSORS = (ATTR_POWERSUPPLY_ATTACHED,)
 
 VACUUM_SENSORS = {
     ATTR_MOP_ATTACHED: XiaomiMiioBinarySensorDescription(
-        key=ATTR_MOP_ATTACHED,
+        key=ATTR_WATER_BOX_ATTACHED,
         name="Mop Attached",
         icon="mdi:square-rounded",
         parent_key=VacuumCoordinatorDataAttributes.status,

@@ -105,6 +106,19 @@ VACUUM_SENSORS = {
     ),
 }
 
+VACUUM_SENSORS_SEPARATE_MOP = {
+    **VACUUM_SENSORS,
+    ATTR_MOP_ATTACHED: XiaomiMiioBinarySensorDescription(
+        key=ATTR_MOP_ATTACHED,
+        name="Mop Attached",
+        icon="mdi:square-rounded",
+        parent_key=VacuumCoordinatorDataAttributes.status,
+        entity_registry_enabled_default=True,
+        device_class=DEVICE_CLASS_CONNECTIVITY,
+        entity_category=ENTITY_CATEGORY_DIAGNOSTIC,
+    ),
+}
+
 HUMIDIFIER_MIIO_BINARY_SENSORS = (ATTR_WATER_TANK_DETACHED,)
 HUMIDIFIER_MIOT_BINARY_SENSORS = (ATTR_WATER_TANK_DETACHED,)
 HUMIDIFIER_MJJSQ_BINARY_SENSORS = (ATTR_NO_WATER, ATTR_WATER_TANK_DETACHED)
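
Note: VACUUM_SENSORS_SEPARATE_MOP relies on dict-literal ordering: entries written after **VACUUM_SENSORS override the inherited ones, so only the ATTR_MOP_ATTACHED description is replaced (pointing back at the real mop property rather than the water-box one). A toy illustration with made-up values:

    BASE = {"mop_attached": "water_box_attached", "battery": "battery"}
    SEPARATE_MOP = {**BASE, "mop_attached": "mop_attached"}

    # The later entry wins; everything else is inherited unchanged.
    assert SEPARATE_MOP == {"mop_attached": "mop_attached", "battery": "battery"}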

@@ -118,8 +132,12 @@ def _setup_vacuum_sensors(hass, config_entry, async_add_entities):
     device = hass.data[DOMAIN][config_entry.entry_id].get(KEY_DEVICE)
     coordinator = hass.data[DOMAIN][config_entry.entry_id][KEY_COORDINATOR]
     entities = []
+    sensors = VACUUM_SENSORS
 
-    for sensor, description in VACUUM_SENSORS.items():
+    if config_entry.data[CONF_MODEL] in MODELS_VACUUM_WITH_SEPARATE_MOP:
+        sensors = VACUUM_SENSORS_SEPARATE_MOP
+
+    for sensor, description in sensors.items():
         parent_key_data = getattr(coordinator.data, description.parent_key)
         if getattr(parent_key_data, description.key, None) is None:
             _LOGGER.debug(

@@ -202,6 +202,7 @@ ROCKROBO_S4_MAX = "roborock.vacuum.a19"
 ROCKROBO_S5_MAX = "roborock.vacuum.s5e"
 ROCKROBO_S6_PURE = "roborock.vacuum.a08"
 ROCKROBO_E2 = "roborock.vacuum.e2"
+ROCKROBO_GENERIC = "roborock.vacuum"
 MODELS_VACUUM = [
     ROCKROBO_V1,
     ROCKROBO_E2,

@@ -213,6 +214,7 @@ MODELS_VACUUM = [
     ROCKROBO_S6_MAXV,
     ROCKROBO_S6_PURE,
     ROCKROBO_S7,
+    ROCKROBO_GENERIC,
 ]
 MODELS_VACUUM_WITH_MOP = [
     ROCKROBO_E2,

@@ -223,6 +225,9 @@ MODELS_VACUUM_WITH_MOP = [
     ROCKROBO_S6_PURE,
     ROCKROBO_S7,
 ]
+MODELS_VACUUM_WITH_SEPARATE_MOP = [
+    ROCKROBO_S7,
+]
 
 MODELS_AIR_MONITOR = [
     MODEL_AIRQUALITYMONITOR_V1,

@@ -166,8 +166,7 @@ class XiaomiCoordinatedMiioEntity(CoordinatorEntity):
             return cls._parse_datetime_time(value)
         if isinstance(value, datetime.datetime):
             return cls._parse_datetime_datetime(value)
-        if isinstance(value, datetime.timedelta):
-            return cls._parse_time_delta(value)
         if value is None:
             _LOGGER.debug("Attribute %s is None, this is unexpected", attribute)
 
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _parse_time_delta(timedelta: datetime.timedelta) -> int:
|
def _parse_time_delta(timedelta: datetime.timedelta) -> int:
|
||||||
return timedelta.seconds
|
return int(timedelta.total_seconds())
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _parse_datetime_time(time: datetime.time) -> str:
|
def _parse_datetime_time(time: datetime.time) -> str:
|
||||||
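
Note: this one-line change is the substance of the fix: timedelta.seconds is only the seconds component of a duration (0-86399) and silently drops whole days, while total_seconds() returns the full duration. For example:

    import datetime

    uptime = datetime.timedelta(days=2, hours=1, minutes=30)
    print(uptime.seconds)               # 5400   -> just the 1h30m component
    print(int(uptime.total_seconds()))  # 178200 -> the actual duration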

@@ -191,7 +190,3 @@ class XiaomiCoordinatedMiioEntity(CoordinatorEntity):
     @staticmethod
     def _parse_datetime_datetime(time: datetime.datetime) -> str:
         return time.isoformat()
-
-    @staticmethod
-    def _parse_datetime_timedelta(time: datetime.timedelta) -> int:
-        return time.seconds

@@ -299,7 +299,7 @@ HUMIDIFIER_MIOT_SENSORS = (
     ATTR_USE_TIME,
     ATTR_WATER_LEVEL,
 )
-HUMIDIFIER_MJJSQ_SENSORS = (ATTR_HUMIDITY, ATTR_TEMPERATURE, ATTR_USE_TIME)
+HUMIDIFIER_MJJSQ_SENSORS = (ATTR_HUMIDITY, ATTR_TEMPERATURE)
 
 PURIFIER_MIIO_SENSORS = (
     ATTR_FILTER_LIFE_REMAINING,

@@ -5,7 +5,7 @@ from typing import Final
 
 MAJOR_VERSION: Final = 2021
 MINOR_VERSION: Final = 11
-PATCH_VERSION: Final = "0"
+PATCH_VERSION: Final = "1"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 8, 0)

@@ -36,8 +36,8 @@ zeroconf==0.36.11
 
 pycryptodome>=3.6.6
 
-# Constrain urllib3 to ensure we deal with CVE-2019-11236 & CVE-2019-11324
-urllib3>=1.24.3
+# Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503
+urllib3>=1.26.5
 
 # Constrain H11 to ensure we get a new enough version to support non-rfc line endings
 h11>=0.12.0

@@ -234,7 +234,7 @@ aiopulse==0.4.2
 aiopvapi==1.6.14
 
 # homeassistant.components.pvpc_hourly_pricing
-aiopvpc==2.2.0
+aiopvpc==2.2.1
 
 # homeassistant.components.webostv
 aiopylgtv==0.4.0

@@ -652,7 +652,7 @@ fjaraskupan==1.0.2
 flipr-api==1.4.1
 
 # homeassistant.components.flux_led
-flux_led==0.24.13
+flux_led==0.24.14
 
 # homeassistant.components.homekit
 fnvhash==0.1.0

@@ -2360,7 +2360,7 @@ uvcclient==0.11.0
 vallox-websocket-api==2.8.1
 
 # homeassistant.components.velbus
-velbus-aio==2021.10.7
+velbus-aio==2021.11.0
 
 # homeassistant.components.venstar
 venstarcolortouch==0.14

@@ -161,7 +161,7 @@ aiopulse==0.4.2
 aiopvapi==1.6.14
 
 # homeassistant.components.pvpc_hourly_pricing
-aiopvpc==2.2.0
+aiopvpc==2.2.1
 
 # homeassistant.components.webostv
 aiopylgtv==0.4.0

@@ -387,7 +387,7 @@ fjaraskupan==1.0.2
 flipr-api==1.4.1
 
 # homeassistant.components.flux_led
-flux_led==0.24.13
+flux_led==0.24.14
 
 # homeassistant.components.homekit
 fnvhash==0.1.0

@@ -1364,7 +1364,7 @@ url-normalize==1.4.1
 uvcclient==0.11.0
 
 # homeassistant.components.velbus
-velbus-aio==2021.10.7
+velbus-aio==2021.11.0
 
 # homeassistant.components.venstar
 venstarcolortouch==0.14

@@ -63,8 +63,8 @@ CONSTRAINT_PATH = os.path.join(
 CONSTRAINT_BASE = """
 pycryptodome>=3.6.6
 
-# Constrain urllib3 to ensure we deal with CVE-2019-11236 & CVE-2019-11324
-urllib3>=1.24.3
+# Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503
+urllib3>=1.26.5
 
 # Constrain H11 to ensure we get a new enough version to support non-rfc line endings
 h11>=0.12.0

@@ -123,7 +123,24 @@ async def test_exception_handling(hass, error):
     assert result["errors"] == {"base": base_error}
 
 
-async def test_lat_or_lon_not_specified(hass):
+async def test_import_station_not_specified(hass):
+    """Test that the import step works."""
+    with mocked_ec(), patch(
+        "homeassistant.components.environment_canada.async_setup_entry",
+        return_value=True,
+    ):
+        fake_config = dict(FAKE_CONFIG)
+        del fake_config[CONF_STATION]
+        result = await hass.config_entries.flow.async_init(
+            DOMAIN, context={"source": SOURCE_IMPORT}, data=fake_config
+        )
+        await hass.async_block_till_done()
+        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
+        assert result["data"] == FAKE_CONFIG
+        assert result["title"] == FAKE_TITLE
+
+
+async def test_import_lat_lon_not_specified(hass):
     """Test that the import step works."""
     with mocked_ec(), patch(
         "homeassistant.components.environment_canada.async_setup_entry",

@@ -131,6 +148,7 @@ async def test_lat_or_lon_not_specified(hass):
     ):
         fake_config = dict(FAKE_CONFIG)
         del fake_config[CONF_LATITUDE]
+        del fake_config[CONF_LONGITUDE]
         result = await hass.config_entries.flow.async_init(
             DOMAIN, context={"source": SOURCE_IMPORT}, data=fake_config
         )

@@ -364,20 +364,16 @@ def test_supported_pgsql(caplog, pgsql_version):
     "sqlite_version,message",
     [
         (
-            "3.32.0",
-            "Version 3.32.0 of SQLite is not supported; minimum supported version is 3.32.1.",
-        ),
-        (
-            "3.31.0",
-            "Version 3.31.0 of SQLite is not supported; minimum supported version is 3.32.1.",
+            "3.30.0",
+            "Version 3.30.0 of SQLite is not supported; minimum supported version is 3.31.0.",
         ),
         (
             "2.0.0",
-            "Version 2.0.0 of SQLite is not supported; minimum supported version is 3.32.1.",
+            "Version 2.0.0 of SQLite is not supported; minimum supported version is 3.31.0.",
         ),
         (
             "dogs",
-            "Version dogs of SQLite is not supported; minimum supported version is 3.32.1.",
+            "Version dogs of SQLite is not supported; minimum supported version is 3.31.0.",
         ),
     ],
 )
|
|||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"sqlite_version",
|
"sqlite_version",
|
||||||
[
|
[
|
||||||
("3.32.1"),
|
("3.31.0"),
|
||||||
("3.33.0"),
|
("3.33.0"),
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
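
Note: in the parametrize lists above, ("3.31.0") is not a one-element tuple, just a parenthesized string (a tuple would need a trailing comma). Since the test takes a single parameter, pytest expects plain values here, so the style works as intended. A minimal runnable sketch (test body assumed):

    import pytest

    @pytest.mark.parametrize("sqlite_version", ["3.31.0", "3.33.0"])
    def test_supported_sqlite(sqlite_version):
        assert isinstance(sqlite_version, str)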

@@ -695,6 +695,52 @@ async def config_flow_device_success(hass, model_to_test):
     }
 
 
+async def config_flow_generic_roborock(hass):
+    """Test a successful config flow for a generic roborock vacuum."""
+    DUMMY_MODEL = "roborock.vacuum.dummy"
+
+    result = await hass.config_entries.flow.async_init(
+        const.DOMAIN, context={"source": config_entries.SOURCE_USER}
+    )
+
+    assert result["type"] == "form"
+    assert result["step_id"] == "cloud"
+    assert result["errors"] == {}
+
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        {const.CONF_MANUAL: True},
+    )
+
+    assert result["type"] == "form"
+    assert result["step_id"] == "manual"
+    assert result["errors"] == {}
+
+    mock_info = get_mock_info(model=DUMMY_MODEL)
+
+    with patch(
+        "homeassistant.components.xiaomi_miio.device.Device.info",
+        return_value=mock_info,
+    ):
+        result = await hass.config_entries.flow.async_configure(
+            result["flow_id"],
+            {CONF_HOST: TEST_HOST, CONF_TOKEN: TEST_TOKEN},
+        )
+
+    assert result["type"] == "create_entry"
+    assert result["title"] == DUMMY_MODEL
+    assert result["data"] == {
+        const.CONF_FLOW_TYPE: const.CONF_DEVICE,
+        const.CONF_CLOUD_USERNAME: None,
+        const.CONF_CLOUD_PASSWORD: None,
+        const.CONF_CLOUD_COUNTRY: None,
+        CONF_HOST: TEST_HOST,
+        CONF_TOKEN: TEST_TOKEN,
+        const.CONF_MODEL: DUMMY_MODEL,
+        const.CONF_MAC: TEST_MAC,
+    }
+
+
 async def zeroconf_device_success(hass, zeroconf_name_to_test, model_to_test):
     """Test a successful zeroconf discovery of a device (base class)."""
     result = await hass.config_entries.flow.async_init(