2024.8.1 (#123544)
commit ae4fc9504a
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/airgradient",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "requirements": ["airgradient==0.7.1"],
+  "requirements": ["airgradient==0.8.0"],
   "zeroconf": ["_airgradient._tcp.local."]
 }
@@ -317,21 +317,24 @@ class BluesoundPlayer(MediaPlayerEntity):
                 await self.async_update_status()

         except (TimeoutError, ClientError):
-            _LOGGER.error("Node %s:%s is offline, retrying later", self.name, self.port)
+            _LOGGER.error("Node %s:%s is offline, retrying later", self.host, self.port)
             await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
             self.start_polling()

         except CancelledError:
-            _LOGGER.debug("Stopping the polling of node %s:%s", self.name, self.port)
+            _LOGGER.debug("Stopping the polling of node %s:%s", self.host, self.port)
         except Exception:
-            _LOGGER.exception("Unexpected error in %s:%s", self.name, self.port)
+            _LOGGER.exception("Unexpected error in %s:%s", self.host, self.port)
             raise

     async def async_added_to_hass(self) -> None:
         """Start the polling task."""
         await super().async_added_to_hass()

-        self._polling_task = self.hass.async_create_task(self._start_poll_command())
+        self._polling_task = self.hass.async_create_background_task(
+            self._start_poll_command(),
+            name=f"bluesound.polling_{self.host}:{self.port}",
+        )

     async def async_will_remove_from_hass(self) -> None:
         """Stop the polling task."""
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/chacon_dio",
   "iot_class": "cloud_push",
   "loggers": ["dio_chacon_api"],
-  "requirements": ["dio-chacon-wifi-api==1.1.0"]
+  "requirements": ["dio-chacon-wifi-api==1.2.0"]
 }
@@ -5,7 +5,9 @@ from __future__ import annotations
 from datetime import timedelta
 import logging

-from coinbase.wallet.client import Client
+from coinbase.rest import RESTClient
+from coinbase.rest.rest_base import HTTPError
+from coinbase.wallet.client import Client as LegacyClient
 from coinbase.wallet.error import AuthenticationError

 from homeassistant.config_entries import ConfigEntry
@@ -15,8 +17,23 @@ from homeassistant.helpers import config_validation as cv, entity_registry as er
 from homeassistant.util import Throttle

 from .const import (
+    ACCOUNT_IS_VAULT,
+    API_ACCOUNT_AMOUNT,
+    API_ACCOUNT_AVALIABLE,
+    API_ACCOUNT_BALANCE,
+    API_ACCOUNT_CURRENCY,
+    API_ACCOUNT_CURRENCY_CODE,
+    API_ACCOUNT_HOLD,
     API_ACCOUNT_ID,
-    API_ACCOUNTS_DATA,
+    API_ACCOUNT_NAME,
+    API_ACCOUNT_VALUE,
+    API_ACCOUNTS,
+    API_DATA,
+    API_RATES_CURRENCY,
+    API_RESOURCE_TYPE,
+    API_TYPE_VAULT,
+    API_V3_ACCOUNT_ID,
+    API_V3_TYPE_VAULT,
     CONF_CURRENCIES,
     CONF_EXCHANGE_BASE,
     CONF_EXCHANGE_RATES,
@@ -59,9 +76,16 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

 def create_and_update_instance(entry: ConfigEntry) -> CoinbaseData:
     """Create and update a Coinbase Data instance."""
-    client = Client(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN])
+    if "organizations" not in entry.data[CONF_API_KEY]:
+        client = LegacyClient(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN])
+        version = "v2"
+    else:
+        client = RESTClient(
+            api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN]
+        )
+        version = "v3"
     base_rate = entry.options.get(CONF_EXCHANGE_BASE, "USD")
-    instance = CoinbaseData(client, base_rate)
+    instance = CoinbaseData(client, base_rate, version)
    instance.update()
    return instance

@@ -86,42 +110,83 @@ async def update_listener(hass: HomeAssistant, config_entry: ConfigEntry) -> Non
             registry.async_remove(entity.entity_id)


-def get_accounts(client):
+def get_accounts(client, version):
     """Handle paginated accounts."""
     response = client.get_accounts()
-    accounts = response[API_ACCOUNTS_DATA]
-    next_starting_after = response.pagination.next_starting_after
-
-    while next_starting_after:
-        response = client.get_accounts(starting_after=next_starting_after)
-        accounts += response[API_ACCOUNTS_DATA]
+    if version == "v2":
+        accounts = response[API_DATA]
         next_starting_after = response.pagination.next_starting_after

-    return accounts
+        while next_starting_after:
+            response = client.get_accounts(starting_after=next_starting_after)
+            accounts += response[API_DATA]
+            next_starting_after = response.pagination.next_starting_after
+
+        return [
+            {
+                API_ACCOUNT_ID: account[API_ACCOUNT_ID],
+                API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
+                API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY][
+                    API_ACCOUNT_CURRENCY_CODE
+                ],
+                API_ACCOUNT_AMOUNT: account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT],
+                ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_TYPE_VAULT,
+            }
+            for account in accounts
+        ]
+
+    accounts = response[API_ACCOUNTS]
+    while response["has_next"]:
+        response = client.get_accounts(cursor=response["cursor"])
+        accounts += response["accounts"]
+
+    return [
+        {
+            API_ACCOUNT_ID: account[API_V3_ACCOUNT_ID],
+            API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
+            API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY],
+            API_ACCOUNT_AMOUNT: account[API_ACCOUNT_AVALIABLE][API_ACCOUNT_VALUE]
+            + account[API_ACCOUNT_HOLD][API_ACCOUNT_VALUE],
+            ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_V3_TYPE_VAULT,
+        }
+        for account in accounts
+    ]


 class CoinbaseData:
     """Get the latest data and update the states."""

-    def __init__(self, client, exchange_base):
+    def __init__(self, client, exchange_base, version):
         """Init the coinbase data object."""

         self.client = client
         self.accounts = None
         self.exchange_base = exchange_base
         self.exchange_rates = None
-        self.user_id = self.client.get_current_user()[API_ACCOUNT_ID]
+        if version == "v2":
+            self.user_id = self.client.get_current_user()[API_ACCOUNT_ID]
+        else:
+            self.user_id = (
+                "v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID]
+            )
+        self.api_version = version

     @Throttle(MIN_TIME_BETWEEN_UPDATES)
     def update(self):
         """Get the latest data from coinbase."""

         try:
-            self.accounts = get_accounts(self.client)
-            self.exchange_rates = self.client.get_exchange_rates(
-                currency=self.exchange_base
-            )
-        except AuthenticationError as coinbase_error:
+            self.accounts = get_accounts(self.client, self.api_version)
+            if self.api_version == "v2":
+                self.exchange_rates = self.client.get_exchange_rates(
+                    currency=self.exchange_base
+                )
+            else:
+                self.exchange_rates = self.client.get(
+                    "/v2/exchange-rates",
+                    params={API_RATES_CURRENCY: self.exchange_base},
+                )[API_DATA]
+        except (AuthenticationError, HTTPError) as coinbase_error:
             _LOGGER.error(
                 "Authentication error connecting to coinbase: %s", coinbase_error
             )
@@ -5,7 +5,9 @@ from __future__ import annotations
 import logging
 from typing import Any

-from coinbase.wallet.client import Client
+from coinbase.rest import RESTClient
+from coinbase.rest.rest_base import HTTPError
+from coinbase.wallet.client import Client as LegacyClient
 from coinbase.wallet.error import AuthenticationError
 import voluptuous as vol

@@ -15,18 +17,17 @@ from homeassistant.config_entries import (
     ConfigFlowResult,
     OptionsFlow,
 )
-from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN
+from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 import homeassistant.helpers.config_validation as cv

 from . import get_accounts
 from .const import (
+    ACCOUNT_IS_VAULT,
     API_ACCOUNT_CURRENCY,
-    API_ACCOUNT_CURRENCY_CODE,
+    API_DATA,
     API_RATES,
-    API_RESOURCE_TYPE,
-    API_TYPE_VAULT,
     CONF_CURRENCIES,
     CONF_EXCHANGE_BASE,
     CONF_EXCHANGE_PRECISION,
@@ -49,8 +50,11 @@ STEP_USER_DATA_SCHEMA = vol.Schema(

 def get_user_from_client(api_key, api_token):
     """Get the user name from Coinbase API credentials."""
-    client = Client(api_key, api_token)
-    return client.get_current_user()
+    if "organizations" not in api_key:
+        client = LegacyClient(api_key, api_token)
+        return client.get_current_user()["name"]
+    client = RESTClient(api_key=api_key, api_secret=api_token)
+    return client.get_portfolios()["portfolios"][0]["name"]


 async def validate_api(hass: HomeAssistant, data):
@@ -60,11 +64,13 @@ async def validate_api(hass: HomeAssistant, data):
         user = await hass.async_add_executor_job(
             get_user_from_client, data[CONF_API_KEY], data[CONF_API_TOKEN]
         )
-    except AuthenticationError as error:
-        if "api key" in str(error):
+    except (AuthenticationError, HTTPError) as error:
+        if "api key" in str(error) or " 401 Client Error" in str(error):
             _LOGGER.debug("Coinbase rejected API credentials due to an invalid API key")
             raise InvalidKey from error
-        if "invalid signature" in str(error):
+        if "invalid signature" in str(
+            error
+        ) or "'Could not deserialize key data" in str(error):
             _LOGGER.debug(
                 "Coinbase rejected API credentials due to an invalid API secret"
             )
@@ -73,8 +79,8 @@ async def validate_api(hass: HomeAssistant, data):
         raise InvalidAuth from error
     except ConnectionError as error:
         raise CannotConnect from error
-
-    return {"title": user["name"]}
+    api_version = "v3" if "organizations" in data[CONF_API_KEY] else "v2"
+    return {"title": user, "api_version": api_version}


 async def validate_options(hass: HomeAssistant, config_entry: ConfigEntry, options):
@@ -82,14 +88,20 @@ async def validate_options(hass: HomeAssistant, config_entry: ConfigEntry, optio

     client = hass.data[DOMAIN][config_entry.entry_id].client

-    accounts = await hass.async_add_executor_job(get_accounts, client)
+    accounts = await hass.async_add_executor_job(
+        get_accounts, client, config_entry.data.get("api_version", "v2")
+    )

     accounts_currencies = [
-        account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]
+        account[API_ACCOUNT_CURRENCY]
         for account in accounts
-        if account[API_RESOURCE_TYPE] != API_TYPE_VAULT
+        if not account[ACCOUNT_IS_VAULT]
     ]
-    available_rates = await hass.async_add_executor_job(client.get_exchange_rates)
+    if config_entry.data.get("api_version", "v2") == "v2":
+        available_rates = await hass.async_add_executor_job(client.get_exchange_rates)
+    else:
+        resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates")
+        available_rates = resp[API_DATA]
     if CONF_CURRENCIES in options:
         for currency in options[CONF_CURRENCIES]:
             if currency not in accounts_currencies:
@@ -134,6 +146,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
             else:
+                user_input[CONF_API_VERSION] = info["api_version"]
                 return self.async_create_entry(title=info["title"], data=user_input)
         return self.async_show_form(
             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
@@ -1,5 +1,7 @@
 """Constants used for Coinbase."""

+ACCOUNT_IS_VAULT = "is_vault"
+
 CONF_CURRENCIES = "account_balance_currencies"
 CONF_EXCHANGE_BASE = "exchange_base"
 CONF_EXCHANGE_RATES = "exchange_rate_currencies"
@@ -10,18 +12,25 @@ DOMAIN = "coinbase"

 # Constants for data returned by Coinbase API
 API_ACCOUNT_AMOUNT = "amount"
+API_ACCOUNT_AVALIABLE = "available_balance"
 API_ACCOUNT_BALANCE = "balance"
 API_ACCOUNT_CURRENCY = "currency"
 API_ACCOUNT_CURRENCY_CODE = "code"
+API_ACCOUNT_HOLD = "hold"
 API_ACCOUNT_ID = "id"
-API_ACCOUNT_NATIVE_BALANCE = "balance"
 API_ACCOUNT_NAME = "name"
 API_ACCOUNTS_DATA = "data"
+API_ACCOUNT_VALUE = "value"
+API_ACCOUNTS = "accounts"
+API_DATA = "data"
 API_RATES = "rates"
+API_RATES_CURRENCY = "currency"
 API_RESOURCE_PATH = "resource_path"
 API_RESOURCE_TYPE = "type"
 API_TYPE_VAULT = "vault"
 API_USD = "USD"
+API_V3_ACCOUNT_ID = "uuid"
+API_V3_TYPE_VAULT = "ACCOUNT_TYPE_VAULT"

 WALLETS = {
     "1INCH": "1INCH",
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/coinbase",
   "iot_class": "cloud_polling",
   "loggers": ["coinbase"],
-  "requirements": ["coinbase==2.1.0"]
+  "requirements": ["coinbase==2.1.0", "coinbase-advanced-py==1.2.2"]
 }
@@ -12,15 +12,12 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback

 from . import CoinbaseData
 from .const import (
+    ACCOUNT_IS_VAULT,
     API_ACCOUNT_AMOUNT,
-    API_ACCOUNT_BALANCE,
     API_ACCOUNT_CURRENCY,
-    API_ACCOUNT_CURRENCY_CODE,
     API_ACCOUNT_ID,
     API_ACCOUNT_NAME,
     API_RATES,
-    API_RESOURCE_TYPE,
-    API_TYPE_VAULT,
     CONF_CURRENCIES,
     CONF_EXCHANGE_PRECISION,
     CONF_EXCHANGE_PRECISION_DEFAULT,
@@ -31,6 +28,7 @@ from .const import (
 _LOGGER = logging.getLogger(__name__)

 ATTR_NATIVE_BALANCE = "Balance in native currency"
+ATTR_API_VERSION = "API Version"

 CURRENCY_ICONS = {
     "BTC": "mdi:currency-btc",
@@ -56,9 +54,9 @@ async def async_setup_entry(
     entities: list[SensorEntity] = []

     provided_currencies: list[str] = [
-        account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]
+        account[API_ACCOUNT_CURRENCY]
         for account in instance.accounts
-        if account[API_RESOURCE_TYPE] != API_TYPE_VAULT
+        if not account[ACCOUNT_IS_VAULT]
     ]

     desired_currencies: list[str] = []
@@ -73,6 +71,11 @@ async def async_setup_entry(
         )

     for currency in desired_currencies:
+        _LOGGER.debug(
+            "Attempting to set up %s account sensor with %s API",
+            currency,
+            instance.api_version,
+        )
         if currency not in provided_currencies:
             _LOGGER.warning(
                 (
@@ -85,12 +88,17 @@ async def async_setup_entry(
             entities.append(AccountSensor(instance, currency))

     if CONF_EXCHANGE_RATES in config_entry.options:
-        entities.extend(
-            ExchangeRateSensor(
-                instance, rate, exchange_base_currency, exchange_precision
-            )
-            for rate in config_entry.options[CONF_EXCHANGE_RATES]
-        )
+        for rate in config_entry.options[CONF_EXCHANGE_RATES]:
+            _LOGGER.debug(
+                "Attempting to set up %s account sensor with %s API",
+                rate,
+                instance.api_version,
+            )
+            entities.append(
+                ExchangeRateSensor(
+                    instance, rate, exchange_base_currency, exchange_precision
+                )
+            )

     async_add_entities(entities)

@@ -105,26 +113,21 @@ class AccountSensor(SensorEntity):
         self._coinbase_data = coinbase_data
         self._currency = currency
         for account in coinbase_data.accounts:
-            if (
-                account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE] != currency
-                or account[API_RESOURCE_TYPE] == API_TYPE_VAULT
-            ):
+            if account[API_ACCOUNT_CURRENCY] != currency or account[ACCOUNT_IS_VAULT]:
                 continue
             self._attr_name = f"Coinbase {account[API_ACCOUNT_NAME]}"
             self._attr_unique_id = (
                 f"coinbase-{account[API_ACCOUNT_ID]}-wallet-"
-                f"{account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]}"
+                f"{account[API_ACCOUNT_CURRENCY]}"
             )
-            self._attr_native_value = account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT]
-            self._attr_native_unit_of_measurement = account[API_ACCOUNT_CURRENCY][
-                API_ACCOUNT_CURRENCY_CODE
-            ]
+            self._attr_native_value = account[API_ACCOUNT_AMOUNT]
+            self._attr_native_unit_of_measurement = account[API_ACCOUNT_CURRENCY]
             self._attr_icon = CURRENCY_ICONS.get(
-                account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE],
+                account[API_ACCOUNT_CURRENCY],
                 DEFAULT_COIN_ICON,
             )
             self._native_balance = round(
-                float(account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT])
+                float(account[API_ACCOUNT_AMOUNT])
                 / float(coinbase_data.exchange_rates[API_RATES][currency]),
                 2,
             )
@@ -144,21 +147,26 @@ class AccountSensor(SensorEntity):
         """Return the state attributes of the sensor."""
         return {
             ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._coinbase_data.exchange_base}",
+            ATTR_API_VERSION: self._coinbase_data.api_version,
         }

     def update(self) -> None:
         """Get the latest state of the sensor."""
+        _LOGGER.debug(
+            "Updating %s account sensor with %s API",
+            self._currency,
+            self._coinbase_data.api_version,
+        )
         self._coinbase_data.update()
         for account in self._coinbase_data.accounts:
             if (
-                account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]
-                != self._currency
-                or account[API_RESOURCE_TYPE] == API_TYPE_VAULT
+                account[API_ACCOUNT_CURRENCY] != self._currency
+                or account[ACCOUNT_IS_VAULT]
             ):
                 continue
-            self._attr_native_value = account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT]
+            self._attr_native_value = account[API_ACCOUNT_AMOUNT]
             self._native_balance = round(
-                float(account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT])
+                float(account[API_ACCOUNT_AMOUNT])
                 / float(self._coinbase_data.exchange_rates[API_RATES][self._currency]),
                 2,
             )
@@ -202,8 +210,13 @@ class ExchangeRateSensor(SensorEntity):

     def update(self) -> None:
         """Get the latest state of the sensor."""
+        _LOGGER.debug(
+            "Updating %s rate sensor with %s API",
+            self._currency,
+            self._coinbase_data.api_version,
+        )
         self._coinbase_data.update()
         self._attr_native_value = round(
-            1 / float(self._coinbase_data.exchange_rates.rates[self._currency]),
+            1 / float(self._coinbase_data.exchange_rates[API_RATES][self._currency]),
             self._precision,
         )
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/daikin",
   "iot_class": "local_polling",
   "loggers": ["pydaikin"],
-  "requirements": ["pydaikin==2.13.1"],
+  "requirements": ["pydaikin==2.13.2"],
   "zeroconf": ["_dkapi._tcp.local."]
 }
@@ -195,7 +195,7 @@ class ConfiguredDoorBird:
             title: str | None = data.get("title")
             if not title or not title.startswith("Home Assistant"):
                 continue
-            event = title.split("(")[1].strip(")")
+            event = title.partition("(")[2].strip(")")
             if input_type := favorite_input_type.get(identifier):
                 events.append(DoorbirdEvent(event, input_type))
             elif input_type := default_event_types.get(event):
@@ -431,41 +431,42 @@ def rename_old_gas_to_mbus(
 ) -> None:
     """Rename old gas sensor to mbus variant."""
     dev_reg = dr.async_get(hass)
-    device_entry_v1 = dev_reg.async_get_device(identifiers={(DOMAIN, entry.entry_id)})
-    if device_entry_v1 is not None:
-        device_id = device_entry_v1.id
+    for dev_id in (mbus_device_id, entry.entry_id):
+        device_entry_v1 = dev_reg.async_get_device(identifiers={(DOMAIN, dev_id)})
+        if device_entry_v1 is not None:
+            device_id = device_entry_v1.id

-        ent_reg = er.async_get(hass)
-        entries = er.async_entries_for_device(ent_reg, device_id)
+            ent_reg = er.async_get(hass)
+            entries = er.async_entries_for_device(ent_reg, device_id)

-        for entity in entries:
-            if entity.unique_id.endswith(
-                "belgium_5min_gas_meter_reading"
-            ) or entity.unique_id.endswith("hourly_gas_meter_reading"):
-                try:
-                    ent_reg.async_update_entity(
-                        entity.entity_id,
-                        new_unique_id=mbus_device_id,
-                        device_id=mbus_device_id,
-                    )
-                except ValueError:
-                    LOGGER.debug(
-                        "Skip migration of %s because it already exists",
-                        entity.entity_id,
-                    )
-                else:
-                    LOGGER.debug(
-                        "Migrated entity %s from unique id %s to %s",
-                        entity.entity_id,
-                        entity.unique_id,
-                        mbus_device_id,
-                    )
-        # Cleanup old device
-        dev_entities = er.async_entries_for_device(
-            ent_reg, device_id, include_disabled_entities=True
-        )
-        if not dev_entities:
-            dev_reg.async_remove_device(device_id)
+            for entity in entries:
+                if entity.unique_id.endswith(
+                    "belgium_5min_gas_meter_reading"
+                ) or entity.unique_id.endswith("hourly_gas_meter_reading"):
+                    try:
+                        ent_reg.async_update_entity(
+                            entity.entity_id,
+                            new_unique_id=mbus_device_id,
+                            device_id=mbus_device_id,
+                        )
+                    except ValueError:
+                        LOGGER.debug(
+                            "Skip migration of %s because it already exists",
+                            entity.entity_id,
+                        )
+                    else:
+                        LOGGER.debug(
+                            "Migrated entity %s from unique id %s to %s",
+                            entity.entity_id,
+                            entity.unique_id,
+                            mbus_device_id,
+                        )
+            # Cleanup old device
+            dev_entities = er.async_entries_for_device(
+                ent_reg, device_id, include_disabled_entities=True
+            )
+            if not dev_entities:
+                dev_reg.async_remove_device(device_id)


 def is_supported_description(
@@ -653,8 +653,6 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
         entities: list[er.RegistryEntry] = er.async_entries_for_config_entry(
             entity_reg, config_entry.entry_id
         )
-
-        orphan_macs: set[str] = set()
         for entity in entities:
             entry_mac = entity.unique_id.split("_")[0]
             if (
@@ -662,17 +660,16 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
                 or "_internet_access" in entity.unique_id
             ) and entry_mac not in device_hosts:
                 _LOGGER.info("Removing orphan entity entry %s", entity.entity_id)
-                orphan_macs.add(entry_mac)
                 entity_reg.async_remove(entity.entity_id)

         device_reg = dr.async_get(self.hass)
-        orphan_connections = {
-            (CONNECTION_NETWORK_MAC, dr.format_mac(mac)) for mac in orphan_macs
+        valid_connections = {
+            (CONNECTION_NETWORK_MAC, dr.format_mac(mac)) for mac in device_hosts
         }
         for device in dr.async_entries_for_config_entry(
             device_reg, config_entry.entry_id
         ):
-            if any(con in device.connections for con in orphan_connections):
+            if not any(con in device.connections for con in valid_connections):
                 _LOGGER.debug("Removing obsolete device entry %s", device.name)
                 device_reg.async_update_device(
                     device.id, remove_config_entry_id=config_entry.entry_id
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20240806.1"]
+  "requirements": ["home-assistant-frontend==20240809.0"]
 }
@@ -22,8 +22,9 @@ from homeassistant.components.notify import (
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     ATTR_ENTITY_ID,
-    ATTR_SERVICE,
+    CONF_ACTION,
     CONF_ENTITIES,
+    CONF_SERVICE,
     STATE_UNAVAILABLE,
 )
 from homeassistant.core import HomeAssistant, callback
@@ -36,11 +37,37 @@ from .entity import GroupEntity

 CONF_SERVICES = "services"


+def _backward_compat_schema(value: Any | None) -> Any:
+    """Backward compatibility for notify service schemas."""
+
+    if not isinstance(value, dict):
+        return value
+
+    # `service` has been renamed to `action`
+    if CONF_SERVICE in value:
+        if CONF_ACTION in value:
+            raise vol.Invalid(
+                "Cannot specify both 'service' and 'action'. Please use 'action' only."
+            )
+        value[CONF_ACTION] = value.pop(CONF_SERVICE)
+
+    return value
+
+
 PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend(
     {
         vol.Required(CONF_SERVICES): vol.All(
             cv.ensure_list,
-            [{vol.Required(ATTR_SERVICE): cv.slug, vol.Optional(ATTR_DATA): dict}],
+            [
+                vol.All(
+                    _backward_compat_schema,
+                    {
+                        vol.Required(CONF_ACTION): cv.slug,
+                        vol.Optional(ATTR_DATA): dict,
+                    },
+                )
+            ],
         )
     }
 )
@@ -88,7 +115,7 @@ class GroupNotifyPlatform(BaseNotificationService):
             tasks.append(
                 asyncio.create_task(
                     self.hass.services.async_call(
-                        DOMAIN, entity[ATTR_SERVICE], sending_payload, blocking=True
+                        DOMAIN, entity[CONF_ACTION], sending_payload, blocking=True
                     )
                 )
             )
@@ -7,6 +7,6 @@
   "iot_class": "local_polling",
   "loggers": ["homewizard_energy"],
   "quality_scale": "platinum",
-  "requirements": ["python-homewizard-energy==v6.1.1"],
+  "requirements": ["python-homewizard-energy==v6.2.0"],
   "zeroconf": ["_hwenergy._tcp.local."]
 }
@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["jvcprojector"],
-  "requirements": ["pyjvcprojector==1.0.11"]
+  "requirements": ["pyjvcprojector==1.0.12"]
 }
@@ -6,5 +6,5 @@
   "dependencies": ["application_credentials"],
   "documentation": "https://www.home-assistant.io/integrations/monzo",
   "iot_class": "cloud_polling",
-  "requirements": ["monzopy==1.3.0"]
+  "requirements": ["monzopy==1.3.2"]
 }
@@ -5,7 +5,7 @@ from __future__ import annotations
 from dataclasses import dataclass
 import logging

-from pyopenweathermap import OWMClient
+from pyopenweathermap import create_owm_client

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
|
||||
"""Runtime data definition."""
|
||||
|
||||
name: str
|
||||
mode: str
|
||||
coordinator: WeatherUpdateCoordinator
|
||||
|
||||
|
||||
@@ -52,7 +53,7 @@ async def async_setup_entry(
     else:
         async_delete_issue(hass, entry.entry_id)

-    owm_client = OWMClient(api_key, mode, lang=language)
+    owm_client = create_owm_client(api_key, mode, lang=language)
     weather_coordinator = WeatherUpdateCoordinator(
         owm_client, latitude, longitude, hass
     )
@@ -61,7 +62,7 @@ async def async_setup_entry(

     entry.async_on_unload(entry.add_update_listener(async_update_options))

-    entry.runtime_data = OpenweathermapData(name, weather_coordinator)
+    entry.runtime_data = OpenweathermapData(name, mode, weather_coordinator)

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

@@ -58,10 +58,17 @@ FORECAST_MODE_DAILY = "daily"
 FORECAST_MODE_FREE_DAILY = "freedaily"
 FORECAST_MODE_ONECALL_HOURLY = "onecall_hourly"
 FORECAST_MODE_ONECALL_DAILY = "onecall_daily"
-OWM_MODE_V25 = "v2.5"
+OWM_MODE_FREE_CURRENT = "current"
+OWM_MODE_FREE_FORECAST = "forecast"
 OWM_MODE_V30 = "v3.0"
-OWM_MODES = [OWM_MODE_V30, OWM_MODE_V25]
-DEFAULT_OWM_MODE = OWM_MODE_V30
+OWM_MODE_V25 = "v2.5"
+OWM_MODES = [
+    OWM_MODE_FREE_CURRENT,
+    OWM_MODE_FREE_FORECAST,
+    OWM_MODE_V30,
+    OWM_MODE_V25,
+]
+DEFAULT_OWM_MODE = OWM_MODE_FREE_CURRENT

 LANGUAGES = [
     "af",
@@ -86,8 +86,14 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
         """Format the weather response correctly."""
         _LOGGER.debug("OWM weather response: %s", weather_report)

+        current_weather = (
+            self._get_current_weather_data(weather_report.current)
+            if weather_report.current is not None
+            else {}
+        )
+
         return {
-            ATTR_API_CURRENT: self._get_current_weather_data(weather_report.current),
+            ATTR_API_CURRENT: current_weather,
             ATTR_API_HOURLY_FORECAST: [
                 self._get_hourly_forecast_weather_data(item)
                 for item in weather_report.hourly_forecast
@@ -122,6 +128,8 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
         }

     def _get_hourly_forecast_weather_data(self, forecast: HourlyWeatherForecast):
+        uv_index = float(forecast.uv_index) if forecast.uv_index is not None else None
+
         return Forecast(
             datetime=forecast.date_time.isoformat(),
             condition=self._get_condition(forecast.condition.id),
@@ -134,12 +142,14 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
             wind_speed=forecast.wind_speed,
             native_wind_gust_speed=forecast.wind_gust,
             wind_bearing=forecast.wind_bearing,
-            uv_index=float(forecast.uv_index),
+            uv_index=uv_index,
             precipitation_probability=round(forecast.precipitation_probability * 100),
             precipitation=self._calc_precipitation(forecast.rain, forecast.snow),
         )

     def _get_daily_forecast_weather_data(self, forecast: DailyWeatherForecast):
+        uv_index = float(forecast.uv_index) if forecast.uv_index is not None else None
+
         return Forecast(
             datetime=forecast.date_time.isoformat(),
             condition=self._get_condition(forecast.condition.id),
@@ -153,7 +163,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
             wind_speed=forecast.wind_speed,
             native_wind_gust_speed=forecast.wind_gust,
             wind_bearing=forecast.wind_bearing,
-            uv_index=float(forecast.uv_index),
+            uv_index=uv_index,
             precipitation_probability=round(forecast.precipitation_probability * 100),
             precipitation=round(forecast.rain + forecast.snow, 2),
         )
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/openweathermap",
   "iot_class": "cloud_polling",
   "loggers": ["pyopenweathermap"],
-  "requirements": ["pyopenweathermap==0.0.9"]
+  "requirements": ["pyopenweathermap==0.1.1"]
 }
@@ -19,6 +19,7 @@ from homeassistant.const import (
     UnitOfVolumetricFlux,
 )
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import StateType
@@ -47,6 +48,7 @@ from .const import (
     DEFAULT_NAME,
     DOMAIN,
     MANUFACTURER,
+    OWM_MODE_FREE_FORECAST,
 )
 from .coordinator import WeatherUpdateCoordinator

@@ -161,16 +163,23 @@ async def async_setup_entry(
     name = domain_data.name
     weather_coordinator = domain_data.coordinator

-    entities: list[AbstractOpenWeatherMapSensor] = [
-        OpenWeatherMapSensor(
-            name,
-            f"{config_entry.unique_id}-{description.key}",
-            description,
-            weather_coordinator,
-        )
-        for description in WEATHER_SENSOR_TYPES
-    ]
-    async_add_entities(entities)
+    if domain_data.mode == OWM_MODE_FREE_FORECAST:
+        entity_registry = er.async_get(hass)
+        entries = er.async_entries_for_config_entry(
+            entity_registry, config_entry.entry_id
+        )
+        for entry in entries:
+            entity_registry.async_remove(entry.entity_id)
+    else:
+        async_add_entities(
+            OpenWeatherMapSensor(
+                name,
+                f"{config_entry.unique_id}-{description.key}",
+                description,
+                weather_coordinator,
+            )
+            for description in WEATHER_SENSOR_TYPES
+        )


 class AbstractOpenWeatherMapSensor(SensorEntity):
@@ -2,7 +2,7 @@

 from typing import Any

-from pyopenweathermap import OWMClient, RequestError
+from pyopenweathermap import RequestError, create_owm_client

 from homeassistant.const import CONF_LANGUAGE, CONF_MODE

@@ -16,7 +16,7 @@ async def validate_api_key(api_key, mode):
     api_key_valid = None
     errors, description_placeholders = {}, {}
     try:
-        owm_client = OWMClient(api_key, mode)
+        owm_client = create_owm_client(api_key, mode)
         api_key_valid = await owm_client.validate_key()
     except RequestError as error:
         errors["base"] = "cannot_connect"
@@ -8,6 +8,7 @@ from homeassistant.components.weather import (
     WeatherEntityFeature,
 )
 from homeassistant.const import (
+    UnitOfLength,
     UnitOfPrecipitationDepth,
     UnitOfPressure,
     UnitOfSpeed,
@@ -29,6 +30,7 @@ from .const import (
     ATTR_API_HUMIDITY,
     ATTR_API_PRESSURE,
     ATTR_API_TEMPERATURE,
+    ATTR_API_VISIBILITY_DISTANCE,
     ATTR_API_WIND_BEARING,
     ATTR_API_WIND_GUST,
     ATTR_API_WIND_SPEED,
@@ -36,6 +38,9 @@ from .const import (
     DEFAULT_NAME,
     DOMAIN,
     MANUFACTURER,
+    OWM_MODE_FREE_FORECAST,
+    OWM_MODE_V25,
+    OWM_MODE_V30,
 )
 from .coordinator import WeatherUpdateCoordinator

@@ -48,10 +53,11 @@ async def async_setup_entry(
     """Set up OpenWeatherMap weather entity based on a config entry."""
     domain_data = config_entry.runtime_data
     name = domain_data.name
+    mode = domain_data.mode
     weather_coordinator = domain_data.coordinator

     unique_id = f"{config_entry.unique_id}"
-    owm_weather = OpenWeatherMapWeather(name, unique_id, weather_coordinator)
+    owm_weather = OpenWeatherMapWeather(name, unique_id, mode, weather_coordinator)

     async_add_entities([owm_weather], False)

@@ -66,11 +72,13 @@ class OpenWeatherMapWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordina
     _attr_native_pressure_unit = UnitOfPressure.HPA
     _attr_native_temperature_unit = UnitOfTemperature.CELSIUS
     _attr_native_wind_speed_unit = UnitOfSpeed.METERS_PER_SECOND
+    _attr_native_visibility_unit = UnitOfLength.METERS

     def __init__(
         self,
         name: str,
         unique_id: str,
+        mode: str,
         weather_coordinator: WeatherUpdateCoordinator,
     ) -> None:
         """Initialize the sensor."""
@@ -83,59 +91,71 @@ class OpenWeatherMapWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordina
             manufacturer=MANUFACTURER,
             name=DEFAULT_NAME,
         )
-        self._attr_supported_features = (
-            WeatherEntityFeature.FORECAST_DAILY | WeatherEntityFeature.FORECAST_HOURLY
-        )
+
+        if mode in (OWM_MODE_V30, OWM_MODE_V25):
+            self._attr_supported_features = (
+                WeatherEntityFeature.FORECAST_DAILY
+                | WeatherEntityFeature.FORECAST_HOURLY
+            )
+        elif mode == OWM_MODE_FREE_FORECAST:
+            self._attr_supported_features = WeatherEntityFeature.FORECAST_HOURLY

     @property
     def condition(self) -> str | None:
         """Return the current condition."""
-        return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_CONDITION]
+        return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_CONDITION)

     @property
     def cloud_coverage(self) -> float | None:
         """Return the Cloud coverage in %."""
-        return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_CLOUDS]
+        return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_CLOUDS)

     @property
     def native_apparent_temperature(self) -> float | None:
         """Return the apparent temperature."""
-        return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_FEELS_LIKE_TEMPERATURE]
+        return self.coordinator.data[ATTR_API_CURRENT].get(
+            ATTR_API_FEELS_LIKE_TEMPERATURE
+        )

     @property
     def native_temperature(self) -> float | None:
         """Return the temperature."""
-        return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_TEMPERATURE]
+        return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_TEMPERATURE)

     @property
     def native_pressure(self) -> float | None:
         """Return the pressure."""
-        return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_PRESSURE]
+        return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_PRESSURE)

     @property
     def humidity(self) -> float | None:
         """Return the humidity."""
-        return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_HUMIDITY]
+        return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_HUMIDITY)

     @property
     def native_dew_point(self) -> float | None:
         """Return the dew point."""
-        return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_DEW_POINT]
+        return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_DEW_POINT)

     @property
     def native_wind_gust_speed(self) -> float | None:
         """Return the wind gust speed."""
-        return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_WIND_GUST]
+        return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_WIND_GUST)

     @property
     def native_wind_speed(self) -> float | None:
         """Return the wind speed."""
-        return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_WIND_SPEED]
+        return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_WIND_SPEED)

     @property
     def wind_bearing(self) -> float | str | None:
         """Return the wind bearing."""
-        return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_WIND_BEARING]
+        return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_WIND_BEARING)

+    @property
+    def visibility(self) -> float | str | None:
+        """Return visibility."""
+        return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_VISIBILITY_DISTANCE)
+
     @callback
     def _async_forecast_daily(self) -> list[Forecast] | None:
@@ -632,7 +632,7 @@ def _update_states_table_with_foreign_key_options(

 def _drop_foreign_key_constraints(
     session_maker: Callable[[], Session], engine: Engine, table: str, column: str
-) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]:
+) -> tuple[bool, list[tuple[str, str, ReflectedForeignKeyConstraint]]]:
     """Drop foreign key constraints for a table on specific columns."""
     inspector = sqlalchemy.inspect(engine)
     dropped_constraints = [
@@ -649,6 +649,7 @@ def _drop_foreign_key_constraints(
         if foreign_key["name"] and foreign_key["constrained_columns"] == [column]
     ]

+    fk_remove_ok = True
     for drop in drops:
         with session_scope(session=session_maker()) as session:
             try:
@@ -660,8 +661,9 @@ def _drop_foreign_key_constraints(
                     TABLE_STATES,
                     column,
                 )
+                fk_remove_ok = False

-    return dropped_constraints
+    return fk_remove_ok, dropped_constraints


 def _restore_foreign_key_constraints(
@@ -1481,7 +1483,7 @@ class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44):
             for column in columns
             for dropped_constraint in _drop_foreign_key_constraints(
                 self.session_maker, self.engine, table, column
-            )
+            )[1]
         ]
         _LOGGER.debug("Dropped foreign key constraints: %s", dropped_constraints)

@@ -1956,14 +1958,15 @@ def cleanup_legacy_states_event_ids(instance: Recorder) -> bool:
         if instance.dialect_name == SupportedDialect.SQLITE:
             # SQLite does not support dropping foreign key constraints
             # so we have to rebuild the table
-            rebuild_sqlite_table(session_maker, instance.engine, States)
+            fk_remove_ok = rebuild_sqlite_table(session_maker, instance.engine, States)
         else:
-            _drop_foreign_key_constraints(
+            fk_remove_ok, _ = _drop_foreign_key_constraints(
                 session_maker, instance.engine, TABLE_STATES, "event_id"
             )
-        _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX)
-        instance.use_legacy_events_index = False
-        _mark_migration_done(session, EventIDPostMigration)
+        if fk_remove_ok:
+            _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX)
+            instance.use_legacy_events_index = False
+            _mark_migration_done(session, EventIDPostMigration)

     return True

@@ -2419,6 +2422,7 @@ class EventIDPostMigration(BaseRunTimeMigration):

     migration_id = "event_id_post_migration"
     task = MigrationTask
+    migration_version = 2

     @staticmethod
     def migrate_data(instance: Recorder) -> bool:
@@ -2469,7 +2473,7 @@ def _mark_migration_done(

 def rebuild_sqlite_table(
     session_maker: Callable[[], Session], engine: Engine, table: type[Base]
-) -> None:
+) -> bool:
     """Rebuild an SQLite table.

     This must only be called after all migrations are complete
@@ -2524,8 +2528,10 @@ def rebuild_sqlite_table(
             # Swallow the exception since we do not want to ever raise
             # an integrity error as it would cause the database
             # to be discarded and recreated from scratch
+            return False
         else:
             _LOGGER.warning("Rebuilding SQLite table %s finished", orig_name)
+            return True
     finally:
         with session_scope(session=session_maker()) as session:
             # Step 12 - Re-enable foreign keys
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/russound_rio",
   "iot_class": "local_push",
   "loggers": ["aiorussound"],
-  "requirements": ["aiorussound==2.2.0"]
+  "requirements": ["aiorussound==2.2.2"]
 }
@@ -5,7 +5,7 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/tessie",
   "iot_class": "cloud_polling",
-  "loggers": ["tessie"],
+  "loggers": ["tessie", "tesla-fleet-api"],
   "quality_scale": "platinum",
   "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.7.3"]
 }
@@ -55,12 +55,12 @@ class TransmissionDataUpdateCoordinator(DataUpdateCoordinator[SessionStats]):
     @property
     def limit(self) -> int:
         """Return limit."""
-        return self.config_entry.data.get(CONF_LIMIT, DEFAULT_LIMIT)
+        return self.config_entry.options.get(CONF_LIMIT, DEFAULT_LIMIT)

     @property
     def order(self) -> str:
         """Return order."""
-        return self.config_entry.data.get(CONF_ORDER, DEFAULT_ORDER)
+        return self.config_entry.options.get(CONF_ORDER, DEFAULT_ORDER)

     async def _async_update_data(self) -> SessionStats:
         """Update transmission data."""
@@ -6,5 +6,5 @@
   "dependencies": ["auth", "application_credentials"],
   "documentation": "https://www.home-assistant.io/integrations/yolink",
   "iot_class": "cloud_push",
-  "requirements": ["yolink-api==0.4.6"]
+  "requirements": ["yolink-api==0.4.7"]
 }
@@ -2,6 +2,7 @@

 import contextlib
 import logging
+from zoneinfo import ZoneInfo

 import voluptuous as vol
 from zha.application.const import BAUD_RATES, RadioType
@@ -12,8 +13,13 @@ from zigpy.config import CONF_DATABASE, CONF_DEVICE, CONF_DEVICE_PATH
 from zigpy.exceptions import NetworkSettingsInconsistent, TransientConnectionError

 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import CONF_TYPE, EVENT_HOMEASSISTANT_STOP, Platform
-from homeassistant.core import Event, HomeAssistant
+from homeassistant.const import (
+    CONF_TYPE,
+    EVENT_CORE_CONFIG_UPDATE,
+    EVENT_HOMEASSISTANT_STOP,
+    Platform,
+)
+from homeassistant.core import Event, HomeAssistant, callback
 from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
 from homeassistant.helpers import device_registry as dr
 import homeassistant.helpers.config_validation as cv
@@ -204,6 +210,15 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
         hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_shutdown)
     )

+    @callback
+    def update_config(event: Event) -> None:
+        """Handle Core config update."""
+        zha_gateway.config.local_timezone = ZoneInfo(hass.config.time_zone)
+
+    config_entry.async_on_unload(
+        hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, update_config)
+    )
+
     await ha_zha_data.gateway_proxy.async_initialize_devices_and_entities()
     await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
     async_dispatcher_send(hass, SIGNAL_ADD_ENTITIES)
@@ -15,6 +15,7 @@ import re
 import time
 from types import MappingProxyType
 from typing import TYPE_CHECKING, Any, Concatenate, NamedTuple, ParamSpec, TypeVar, cast
+from zoneinfo import ZoneInfo

 import voluptuous as vol
 from zha.application.const import (
@@ -1273,6 +1274,7 @@ def create_zha_config(hass: HomeAssistant, ha_zha_data: HAZHAData) -> ZHAData:
             quirks_configuration=quirks_config,
             device_overrides=overrides_config,
         ),
+        local_timezone=ZoneInfo(hass.config.time_zone),
     )


@@ -21,7 +21,7 @@
     "zha",
     "universal_silabs_flasher"
   ],
-  "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.28"],
+  "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.30"],
   "usb": [
     {
       "vid": "10C4",
@@ -817,9 +817,7 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non

     This method is a coroutine.
     """
-    # CORE_CONFIG_SCHEMA is not async safe since it uses vol.IsDir
-    # so we need to run it in an executor job.
-    config = await hass.async_add_executor_job(CORE_CONFIG_SCHEMA, config)
+    config = CORE_CONFIG_SCHEMA(config)

     # Only load auth during startup.
     if not hasattr(hass, "auth"):
@@ -1535,15 +1533,9 @@ async def async_process_component_config(
         return IntegrationConfigInfo(None, config_exceptions)

     # No custom config validator, proceed with schema validation
-    if config_schema := getattr(component, "CONFIG_SCHEMA", None):
+    if hasattr(component, "CONFIG_SCHEMA"):
         try:
-            if domain in config:
-                # cv.isdir, cv.isfile, cv.isdevice are not async
-                # friendly so we need to run this in executor
-                schema = await hass.async_add_executor_job(config_schema, config)
-            else:
-                schema = config_schema(config)
-            return IntegrationConfigInfo(schema, [])
+            return IntegrationConfigInfo(component.CONFIG_SCHEMA(config), [])
         except vol.Invalid as exc:
             exc_info = ConfigExceptionInfo(
                 exc,
@@ -24,7 +24,7 @@ if TYPE_CHECKING:
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2024
 MINOR_VERSION: Final = 8
-PATCH_VERSION: Final = "0"
+PATCH_VERSION: Final = "1"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)
@@ -4,7 +4,7 @@ aiodhcpwatcher==1.0.2
 aiodiscover==2.1.0
 aiodns==3.2.0
 aiohttp-fast-zlib==0.1.1
-aiohttp==3.10.1
+aiohttp==3.10.2
 aiohttp_cors==0.7.0
 aiozoneinfo==0.2.1
 astral==2.2
@@ -31,7 +31,7 @@ habluetooth==3.1.3
 hass-nabucasa==0.81.1
 hassil==1.7.4
 home-assistant-bluetooth==1.12.2
-home-assistant-frontend==20240806.1
+home-assistant-frontend==20240809.0
 home-assistant-intents==2024.8.7
 httpx==0.27.0
 ifaddr==0.2.0
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "homeassistant"
-version = "2024.8.0"
+version = "2024.8.1"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"
@@ -24,7 +24,7 @@ classifiers = [
 requires-python = ">=3.12.0"
 dependencies = [
     "aiodns==3.2.0",
-    "aiohttp==3.10.1",
+    "aiohttp==3.10.2",
     "aiohttp_cors==0.7.0",
     "aiohttp-fast-zlib==0.1.1",
     "aiozoneinfo==0.2.1",
@@ -4,7 +4,7 @@

 # Home Assistant Core
 aiodns==3.2.0
-aiohttp==3.10.1
+aiohttp==3.10.2
 aiohttp_cors==0.7.0
 aiohttp-fast-zlib==0.1.1
 aiozoneinfo==0.2.1
@@ -350,7 +350,7 @@ aioridwell==2024.01.0
 aioruckus==0.34

 # homeassistant.components.russound_rio
-aiorussound==2.2.0
+aiorussound==2.2.2

 # homeassistant.components.ruuvi_gateway
 aioruuvigateway==0.1.0
@@ -410,7 +410,7 @@ aiowithings==3.0.2
 aioymaps==1.2.5

 # homeassistant.components.airgradient
-airgradient==0.7.1
+airgradient==0.8.0

 # homeassistant.components.airly
 airly==1.1.0
@@ -660,6 +660,9 @@ clearpasspy==1.0.2
 # homeassistant.components.sinch
 clx-sdk-xms==1.0.0

+# homeassistant.components.coinbase
+coinbase-advanced-py==1.2.2
+
 # homeassistant.components.coinbase
 coinbase==2.1.0

@@ -732,7 +735,7 @@ devolo-home-control-api==0.18.3
 devolo-plc-api==1.4.1

 # homeassistant.components.chacon_dio
-dio-chacon-wifi-api==1.1.0
+dio-chacon-wifi-api==1.2.0

 # homeassistant.components.directv
 directv==0.4.0
@@ -1093,7 +1096,7 @@ hole==0.8.0
 holidays==0.53

 # homeassistant.components.frontend
-home-assistant-frontend==20240806.1
+home-assistant-frontend==20240809.0

 # homeassistant.components.conversation
 home-assistant-intents==2024.8.7
@@ -1351,7 +1354,7 @@ moat-ble==0.1.1
 moehlenhoff-alpha2==1.3.1

 # homeassistant.components.monzo
-monzopy==1.3.0
+monzopy==1.3.2

 # homeassistant.components.mopeka
 mopeka-iot-ble==0.8.0
@@ -1786,7 +1789,7 @@ pycsspeechtts==1.0.8
 # pycups==1.9.73

 # homeassistant.components.daikin
-pydaikin==2.13.1
+pydaikin==2.13.2

 # homeassistant.components.danfoss_air
 pydanfossair==0.1.0
@@ -1942,7 +1945,7 @@ pyisy==3.1.14
 pyitachip2ir==0.0.7

 # homeassistant.components.jvc_projector
-pyjvcprojector==1.0.11
+pyjvcprojector==1.0.12

 # homeassistant.components.kaleidescape
 pykaleidescape==1.0.1
@@ -2068,7 +2071,7 @@ pyombi==0.1.10
 pyopenuv==2023.02.0

 # homeassistant.components.openweathermap
-pyopenweathermap==0.0.9
+pyopenweathermap==0.1.1

 # homeassistant.components.opnsense
 pyopnsense==0.4.0
@@ -2280,7 +2283,7 @@ python-gitlab==1.6.0
 python-homeassistant-analytics==0.7.0

 # homeassistant.components.homewizard
-python-homewizard-energy==v6.1.1
+python-homewizard-energy==v6.2.0

 # homeassistant.components.hp_ilo
 python-hpilo==4.4.3
@@ -2962,7 +2965,7 @@ yeelight==0.7.14
 yeelightsunflower==0.0.10

 # homeassistant.components.yolink
-yolink-api==0.4.6
+yolink-api==0.4.7

 # homeassistant.components.youless
 youless-api==2.1.2
@@ -2986,7 +2989,7 @@ zeroconf==0.132.2
 zeversolar==0.3.1

 # homeassistant.components.zha
-zha==0.0.28
+zha==0.0.30

 # homeassistant.components.zhong_hong
 zhong-hong-hvac==1.0.12
@@ -332,7 +332,7 @@ aioridwell==2024.01.0
 aioruckus==0.34

 # homeassistant.components.russound_rio
-aiorussound==2.2.0
+aiorussound==2.2.2

 # homeassistant.components.ruuvi_gateway
 aioruuvigateway==0.1.0
@@ -392,7 +392,7 @@ aiowithings==3.0.2
 aioymaps==1.2.5

 # homeassistant.components.airgradient
-airgradient==0.7.1
+airgradient==0.8.0

 # homeassistant.components.airly
 airly==1.1.0
@@ -562,6 +562,9 @@ cached_ipaddress==0.3.0
 # homeassistant.components.caldav
 caldav==1.3.9

+# homeassistant.components.coinbase
+coinbase-advanced-py==1.2.2
+
 # homeassistant.components.coinbase
 coinbase==2.1.0

@@ -625,7 +628,7 @@ devolo-home-control-api==0.18.3
 devolo-plc-api==1.4.1

 # homeassistant.components.chacon_dio
-dio-chacon-wifi-api==1.1.0
+dio-chacon-wifi-api==1.2.0

 # homeassistant.components.directv
 directv==0.4.0
@@ -916,7 +919,7 @@ hole==0.8.0
 holidays==0.53

 # homeassistant.components.frontend
-home-assistant-frontend==20240806.1
+home-assistant-frontend==20240809.0

 # homeassistant.components.conversation
 home-assistant-intents==2024.8.7
@@ -1117,7 +1120,7 @@ moat-ble==0.1.1
 moehlenhoff-alpha2==1.3.1

 # homeassistant.components.monzo
-monzopy==1.3.0
+monzopy==1.3.2

 # homeassistant.components.mopeka
 mopeka-iot-ble==0.8.0
@@ -1433,7 +1436,7 @@ pycoolmasternet-async==0.1.5
 pycsspeechtts==1.0.8

 # homeassistant.components.daikin
-pydaikin==2.13.1
+pydaikin==2.13.2

 # homeassistant.components.deconz
 pydeconz==116
@@ -1547,7 +1550,7 @@ pyiss==1.0.1
 pyisy==3.1.14

 # homeassistant.components.jvc_projector
-pyjvcprojector==1.0.11
+pyjvcprojector==1.0.12

 # homeassistant.components.kaleidescape
 pykaleidescape==1.0.1
@@ -1652,7 +1655,7 @@ pyoctoprintapi==0.1.12
 pyopenuv==2023.02.0

 # homeassistant.components.openweathermap
-pyopenweathermap==0.0.9
+pyopenweathermap==0.1.1

 # homeassistant.components.opnsense
 pyopnsense==0.4.0
@@ -1804,7 +1807,7 @@ python-fullykiosk==0.0.14
 python-homeassistant-analytics==0.7.0

 # homeassistant.components.homewizard
-python-homewizard-energy==v6.1.1
+python-homewizard-energy==v6.2.0

 # homeassistant.components.izone
 python-izone==1.2.9
@@ -2339,7 +2342,7 @@ yalexs==6.4.3
 yeelight==0.7.14

 # homeassistant.components.yolink
-yolink-api==0.4.6
+yolink-api==0.4.7

 # homeassistant.components.youless
 youless-api==2.1.2
@@ -2360,7 +2363,7 @@ zeroconf==0.132.2
 zeversolar==0.3.1

 # homeassistant.components.zha
-zha==0.0.28
+zha==0.0.30

 # homeassistant.components.zwave_js
 zwave-js-server-python==0.57.0
@@ -5,13 +5,14 @@ from homeassistant.components.coinbase.const import (
     CONF_EXCHANGE_RATES,
     DOMAIN,
 )
-from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN
+from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION

 from .const import (
     GOOD_CURRENCY_2,
     GOOD_EXCHANGE_RATE,
     GOOD_EXCHANGE_RATE_2,
     MOCK_ACCOUNTS_RESPONSE,
+    MOCK_ACCOUNTS_RESPONSE_V3,
 )

 from tests.common import MockConfigEntry
@ -54,6 +55,33 @@ def mocked_get_accounts(_, **kwargs):
|
||||
return MockGetAccounts(**kwargs)
|
||||
|
||||
|
||||
class MockGetAccountsV3:
|
||||
"""Mock accounts with pagination."""
|
||||
|
||||
def __init__(self, cursor=""):
|
||||
"""Init mocked object, forced to return two at a time."""
|
||||
ids = [account["uuid"] for account in MOCK_ACCOUNTS_RESPONSE_V3]
|
||||
start = ids.index(cursor) if cursor else 0
|
||||
|
||||
has_next = (target_end := start + 2) < len(MOCK_ACCOUNTS_RESPONSE_V3)
|
||||
end = target_end if has_next else -1
|
||||
next_cursor = ids[end] if has_next else ids[-1]
|
||||
self.accounts = {
|
||||
"accounts": MOCK_ACCOUNTS_RESPONSE_V3[start:end],
|
||||
"has_next": has_next,
|
||||
"cursor": next_cursor,
|
||||
}
|
||||
|
||||
def __getitem__(self, item):
|
||||
"""Handle subscript request."""
|
||||
return self.accounts[item]
|
||||
|
||||
|
||||
def mocked_get_accounts_v3(_, **kwargs):
|
||||
"""Return simplified accounts using mock."""
|
||||
return MockGetAccountsV3(**kwargs)
|
||||
|
||||
|
||||
def mock_get_current_user():
|
||||
"""Return a simplified mock user."""
|
||||
return {
|
||||
@ -74,6 +102,19 @@ def mock_get_exchange_rates():
|
||||
}
|
||||
|
||||
|
||||
def mock_get_portfolios():
|
||||
"""Return a mocked list of Coinbase portfolios."""
|
||||
return {
|
||||
"portfolios": [
|
||||
{
|
||||
"name": "Default",
|
||||
"uuid": "123456",
|
||||
"type": "DEFAULT",
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
async def init_mock_coinbase(hass, currencies=None, rates=None):
|
||||
"""Init Coinbase integration for testing."""
|
||||
config_entry = MockConfigEntry(
|
||||
@ -93,3 +134,28 @@ async def init_mock_coinbase(hass, currencies=None, rates=None):
|
||||
await hass.async_block_till_done()
|
||||
|
||||
return config_entry
|
||||
|
||||
|
||||
async def init_mock_coinbase_v3(hass, currencies=None, rates=None):
|
||||
"""Init Coinbase integration for testing."""
|
||||
config_entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
entry_id="080272b77a4f80c41b94d7cdc86fd826",
|
||||
unique_id=None,
|
||||
title="Test User v3",
|
||||
data={
|
||||
CONF_API_KEY: "organizations/123456",
|
||||
CONF_API_TOKEN: "AbCDeF",
|
||||
CONF_API_VERSION: "v3",
|
||||
},
|
||||
options={
|
||||
CONF_CURRENCIES: currencies or [],
|
||||
CONF_EXCHANGE_RATES: rates or [],
|
||||
},
|
||||
)
|
||||
config_entry.add_to_hass(hass)
|
||||
|
||||
await hass.config_entries.async_setup(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
return config_entry
|
||||
|
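The `MockGetAccountsV3` helper above mirrors the cursor-based pagination contract of Coinbase's v3 accounts endpoint: every response carries `has_next` and a `cursor` to feed into the next request, and this mock is forced to hand out two accounts per page. A minimal sketch of how a caller drains such an endpoint (the `client` object and its `get_accounts` signature are illustrative assumptions, not the integration's actual code):

```python
def fetch_all_accounts(client):
    """Drain a cursor-paginated accounts endpoint (illustrative sketch)."""
    accounts = []
    cursor = ""  # an empty cursor requests the first page
    while True:
        page = client.get_accounts(cursor=cursor)  # hypothetical client call
        accounts.extend(page["accounts"])
        if not page["has_next"]:
            return accounts
        cursor = page["cursor"]  # resume where the previous page ended
```
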
@ -31,3 +31,31 @@ MOCK_ACCOUNTS_RESPONSE = [
        "type": "fiat",
    },
]

MOCK_ACCOUNTS_RESPONSE_V3 = [
    {
        "uuid": "123456789",
        "name": "BTC Wallet",
        "currency": GOOD_CURRENCY,
        "available_balance": {"value": "0.00001", "currency": GOOD_CURRENCY},
        "type": "ACCOUNT_TYPE_CRYPTO",
        "hold": {"value": "0", "currency": GOOD_CURRENCY},
    },
    {
        "uuid": "abcdefg",
        "name": "BTC Vault",
        "currency": GOOD_CURRENCY,
        "available_balance": {"value": "100.00", "currency": GOOD_CURRENCY},
        "type": "ACCOUNT_TYPE_VAULT",
        "hold": {"value": "0", "currency": GOOD_CURRENCY},
    },
    {
        "uuid": "987654321",
        "name": "USD Wallet",
        "currency": GOOD_CURRENCY_2,
        "available_balance": {"value": "9.90", "currency": GOOD_CURRENCY_2},
        "type": "ACCOUNT_TYPE_FIAT",
        "ready": True,
        "hold": {"value": "0", "currency": GOOD_CURRENCY_2},
    },
]

@ -3,40 +3,25 @@
  dict({
    'accounts': list([
      dict({
        'balance': dict({
          'amount': '**REDACTED**',
          'currency': 'BTC',
        }),
        'currency': dict({
          'code': 'BTC',
        }),
        'amount': '**REDACTED**',
        'currency': 'BTC',
        'id': '**REDACTED**',
        'is_vault': False,
        'name': 'BTC Wallet',
        'type': 'wallet',
      }),
      dict({
        'balance': dict({
          'amount': '**REDACTED**',
          'currency': 'BTC',
        }),
        'currency': dict({
          'code': 'BTC',
        }),
        'amount': '**REDACTED**',
        'currency': 'BTC',
        'id': '**REDACTED**',
        'is_vault': True,
        'name': 'BTC Vault',
        'type': 'vault',
      }),
      dict({
        'balance': dict({
          'amount': '**REDACTED**',
          'currency': 'USD',
        }),
        'currency': dict({
          'code': 'USD',
        }),
        'amount': '**REDACTED**',
        'currency': 'USD',
        'id': '**REDACTED**',
        'is_vault': False,
        'name': 'USD Wallet',
        'type': 'fiat',
      }),
    ]),
    'entry': dict({

@ -14,15 +14,18 @@ from homeassistant.components.coinbase.const import (
    CONF_EXCHANGE_RATES,
    DOMAIN,
)
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

from .common import (
    init_mock_coinbase,
    init_mock_coinbase_v3,
    mock_get_current_user,
    mock_get_exchange_rates,
    mock_get_portfolios,
    mocked_get_accounts,
    mocked_get_accounts_v3,
)
from .const import BAD_CURRENCY, BAD_EXCHANGE_RATE, GOOD_CURRENCY, GOOD_EXCHANGE_RATE

@ -53,16 +56,17 @@ async def test_form(hass: HomeAssistant) -> None:
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                CONF_API_KEY: "123456",
                CONF_API_TOKEN: "AbCDeF",
            },
            {CONF_API_KEY: "123456", CONF_API_TOKEN: "AbCDeF"},
        )
        await hass.async_block_till_done()

    assert result2["type"] is FlowResultType.CREATE_ENTRY
    assert result2["title"] == "Test User"
    assert result2["data"] == {CONF_API_KEY: "123456", CONF_API_TOKEN: "AbCDeF"}
    assert result2["data"] == {
        CONF_API_KEY: "123456",
        CONF_API_TOKEN: "AbCDeF",
        CONF_API_VERSION: "v2",
    }
    assert len(mock_setup_entry.mock_calls) == 1


@ -314,3 +318,77 @@ async def test_option_catch_all_exception(hass: HomeAssistant) -> None:

    assert result2["type"] is FlowResultType.FORM
    assert result2["errors"] == {"base": "unknown"}


async def test_form_v3(hass: HomeAssistant) -> None:
    """Test we get the form."""

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] is FlowResultType.FORM
    assert result["errors"] == {}

    with (
        patch("coinbase.rest.RESTClient.get_accounts", new=mocked_get_accounts_v3),
        patch(
            "coinbase.rest.RESTClient.get_portfolios",
            return_value=mock_get_portfolios(),
        ),
        patch(
            "coinbase.rest.RESTBase.get",
            return_value={"data": mock_get_exchange_rates()},
        ),
        patch(
            "homeassistant.components.coinbase.async_setup_entry",
            return_value=True,
        ) as mock_setup_entry,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {CONF_API_KEY: "organizations/123456", CONF_API_TOKEN: "AbCDeF"},
        )
        await hass.async_block_till_done()

    assert result2["type"] is FlowResultType.CREATE_ENTRY
    assert result2["title"] == "Default"
    assert result2["data"] == {
        CONF_API_KEY: "organizations/123456",
        CONF_API_TOKEN: "AbCDeF",
        CONF_API_VERSION: "v3",
    }
    assert len(mock_setup_entry.mock_calls) == 1


async def test_option_form_v3(hass: HomeAssistant) -> None:
    """Test we handle a good wallet currency option."""

    with (
        patch("coinbase.rest.RESTClient.get_accounts", new=mocked_get_accounts_v3),
        patch(
            "coinbase.rest.RESTClient.get_portfolios",
            return_value=mock_get_portfolios(),
        ),
        patch(
            "coinbase.rest.RESTBase.get",
            return_value={"data": mock_get_exchange_rates()},
        ),
        patch(
            "homeassistant.components.coinbase.update_listener"
        ) as mock_update_listener,
    ):
        config_entry = await init_mock_coinbase_v3(hass)
        await hass.async_block_till_done()
        result = await hass.config_entries.options.async_init(config_entry.entry_id)
        await hass.async_block_till_done()
        result2 = await hass.config_entries.options.async_configure(
            result["flow_id"],
            user_input={
                CONF_CURRENCIES: [GOOD_CURRENCY],
                CONF_EXCHANGE_RATES: [GOOD_EXCHANGE_RATE],
                CONF_EXCHANGE_PRECISION: 5,
            },
        )
        assert result2["type"] is FlowResultType.CREATE_ENTRY
        await hass.async_block_till_done()
        assert len(mock_update_listener.mock_calls) == 1

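The flow tests above pin down how the config entry now records an API version: a legacy key like `"123456"` is stored with `CONF_API_VERSION: "v2"`, while a CDP key such as `"organizations/123456"` is stored with `"v3"`. A detection heuristic consistent with these fixtures, shown as a sketch rather than the config flow's exact check:

```python
def guess_api_version(api_key: str) -> str:
    """Sketch: v3 (CDP) keys are resource paths, legacy v2 keys are opaque IDs."""
    return "v3" if api_key.startswith("organizations/") else "v2"
```
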
@ -7,6 +7,10 @@
    "1": {
      "title": "Home Assistant (mydoorbird_motion)",
      "value": "http://127.0.0.1:8123/api/doorbird/mydoorbird_motion?token=01J2F4B97Y7P1SARXEJ6W07EKD"
    },
    "2": {
      "title": "externally added event",
      "value": "http://127.0.0.1/"
    }
  }
}

@ -49,4 +49,4 @@ async def test_reset_favorites_button(
        DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: reset_entity_id}, blocking=True
    )
    assert hass.states.get(reset_entity_id).state != STATE_UNKNOWN
    assert doorbird_entry.api.delete_favorite.call_count == 2
    assert doorbird_entry.api.delete_favorite.call_count == 3

@ -219,6 +219,101 @@ async def test_migrate_hourly_gas_to_mbus(
    )


async def test_migrate_gas_with_devid_to_mbus(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    device_registry: dr.DeviceRegistry,
    dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock],
) -> None:
    """Test migration of unique_id."""
    (connection_factory, transport, protocol) = dsmr_connection_fixture

    mock_entry = MockConfigEntry(
        domain=DOMAIN,
        unique_id="/dev/ttyUSB0",
        data={
            "port": "/dev/ttyUSB0",
            "dsmr_version": "5B",
            "serial_id": "1234",
            "serial_id_gas": "37464C4F32313139303333373331",
        },
        options={
            "time_between_update": 0,
        },
    )

    mock_entry.add_to_hass(hass)

    old_unique_id = "37464C4F32313139303333373331_belgium_5min_gas_meter_reading"

    device = device_registry.async_get_or_create(
        config_entry_id=mock_entry.entry_id,
        identifiers={(DOMAIN, "37464C4F32313139303333373331")},
        name="Gas Meter",
    )
    await hass.async_block_till_done()

    entity: er.RegistryEntry = entity_registry.async_get_or_create(
        suggested_object_id="gas_meter_reading",
        disabled_by=None,
        domain=SENSOR_DOMAIN,
        platform=DOMAIN,
        device_id=device.id,
        unique_id=old_unique_id,
        config_entry=mock_entry,
    )
    assert entity.unique_id == old_unique_id
    await hass.async_block_till_done()

    telegram = Telegram()
    telegram.add(
        MBUS_DEVICE_TYPE,
        CosemObject((0, 1), [{"value": "003", "unit": ""}]),
        "MBUS_DEVICE_TYPE",
    )
    telegram.add(
        MBUS_EQUIPMENT_IDENTIFIER,
        CosemObject(
            (0, 1),
            [{"value": "37464C4F32313139303333373331", "unit": ""}],
        ),
        "MBUS_EQUIPMENT_IDENTIFIER",
    )
    telegram.add(
        MBUS_METER_READING,
        MBusObject(
            (0, 1),
            [
                {"value": datetime.datetime.fromtimestamp(1551642213)},
                {"value": Decimal(745.695), "unit": "m3"},
            ],
        ),
        "MBUS_METER_READING",
    )

    assert await hass.config_entries.async_setup(mock_entry.entry_id)
    await hass.async_block_till_done()

    telegram_callback = connection_factory.call_args_list[0][0][2]

    # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser
    telegram_callback(telegram)

    # after receiving the telegram, entities need a chance to be created
    await hass.async_block_till_done()

    assert (
        entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id)
        is None
    )
    assert (
        entity_registry.async_get_entity_id(
            SENSOR_DOMAIN, DOMAIN, "37464C4F32313139303333373331"
        )
        == "sensor.gas_meter_reading"
    )


async def test_migrate_gas_to_mbus_exists(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,

@ -122,7 +122,7 @@ async def test_send_message_with_data(hass: HomeAssistant, tmp_path: Path) -> None:
        "services": [
            {"service": "test_service1"},
            {
                "service": "test_service2",
                "action": "test_service2",
                "data": {
                    "target": "unnamed device",
                    "data": {"test": "message", "default": "default"},
@ -202,6 +202,41 @@ async def test_send_message_with_data(hass: HomeAssistant, tmp_path: Path) -> None:
    )


async def test_invalid_configuration(
    hass: HomeAssistant, tmp_path: Path, caplog: pytest.LogCaptureFixture
) -> None:
    """Test failing to set up group with an invalid configuration."""
    assert await async_setup_component(
        hass,
        "group",
        {},
    )
    await hass.async_block_till_done()

    group_setup = [
        {
            "platform": "group",
            "name": "My invalid notification group",
            "services": [
                {
                    "service": "test_service1",
                    "action": "test_service2",
                    "data": {
                        "target": "unnamed device",
                        "data": {"test": "message", "default": "default"},
                    },
                },
            ],
        }
    ]
    await help_setup_notify(hass, tmp_path, {"service1": 1, "service2": 2}, group_setup)
    assert not hass.services.has_service("notify", "my_invalid_notification_group")
    assert (
        "Invalid config for 'notify' from integration 'group':"
        " Cannot specify both 'service' and 'action'." in caplog.text
    )


async def test_reload_notify(hass: HomeAssistant, tmp_path: Path) -> None:
    """Verify we can reload the notify service."""
    assert await async_setup_component(
@ -219,7 +254,7 @@ async def test_reload_notify(hass: HomeAssistant, tmp_path: Path) -> None:
            {
                "name": "group_notify",
                "platform": "group",
                "services": [{"service": "test_service1"}],
                "services": [{"action": "test_service1"}],
            }
        ],
    )

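These hunks follow Home Assistant's service-to-action rename: a notify group entry now uses `action:` in place of the deprecated `service:` key, and supplying both is rejected during config validation. A minimal valid setup in the dict form these tests use (the service names are illustrative):

```python
group_setup = [
    {
        "platform": "group",
        "name": "My notification group",
        "services": [
            # "action" replaces the deprecated "service" key; never set both.
            {"action": "test_service1"},
            {
                "action": "test_service2",
                "data": {"target": "unnamed device"},
            },
        ],
    }
]
```
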
@ -45,7 +45,7 @@ CONFIG = {
VALID_YAML_CONFIG = {CONF_API_KEY: "foo"}


def _create_mocked_owm_client(is_valid: bool):
def _create_mocked_owm_factory(is_valid: bool):
    current_weather = CurrentWeather(
        date_time=datetime.fromtimestamp(1714063536, tz=UTC),
        temperature=6.84,
@ -118,18 +118,18 @@ def _create_mocked_owm_client(is_valid: bool):
def mock_owm_client():
    """Mock config_flow OWMClient."""
    with patch(
        "homeassistant.components.openweathermap.OWMClient",
    ) as owm_client_mock:
        yield owm_client_mock
        "homeassistant.components.openweathermap.create_owm_client",
    ) as mock:
        yield mock


@pytest.fixture(name="config_flow_owm_client_mock")
def mock_config_flow_owm_client():
    """Mock config_flow OWMClient."""
    with patch(
        "homeassistant.components.openweathermap.utils.OWMClient",
    ) as config_flow_owm_client_mock:
        yield config_flow_owm_client_mock
        "homeassistant.components.openweathermap.utils.create_owm_client",
    ) as mock:
        yield mock


async def test_successful_config_flow(
@ -138,7 +138,7 @@ async def test_successful_config_flow(
    config_flow_owm_client_mock,
) -> None:
    """Test that the form is served with valid input."""
    mock = _create_mocked_owm_client(True)
    mock = _create_mocked_owm_factory(True)
    owm_client_mock.return_value = mock
    config_flow_owm_client_mock.return_value = mock

@ -177,7 +177,7 @@ async def test_abort_config_flow(
    config_flow_owm_client_mock,
) -> None:
    """Test that the form is served with same data."""
    mock = _create_mocked_owm_client(True)
    mock = _create_mocked_owm_factory(True)
    owm_client_mock.return_value = mock
    config_flow_owm_client_mock.return_value = mock

@ -200,7 +200,7 @@ async def test_config_flow_options_change(
    config_flow_owm_client_mock,
) -> None:
    """Test the options form."""
    mock = _create_mocked_owm_client(True)
    mock = _create_mocked_owm_factory(True)
    owm_client_mock.return_value = mock
    config_flow_owm_client_mock.return_value = mock

@ -261,7 +261,7 @@ async def test_form_invalid_api_key(
    config_flow_owm_client_mock,
) -> None:
    """Test that the form is served with no input."""
    config_flow_owm_client_mock.return_value = _create_mocked_owm_client(False)
    config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(False)
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}, data=CONFIG
    )
@ -269,7 +269,7 @@ async def test_form_invalid_api_key(
    assert result["type"] is FlowResultType.FORM
    assert result["errors"] == {"base": "invalid_api_key"}

    config_flow_owm_client_mock.return_value = _create_mocked_owm_client(True)
    config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(True)
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=CONFIG
    )
@ -282,7 +282,7 @@ async def test_form_api_call_error(
    config_flow_owm_client_mock,
) -> None:
    """Test setting up with api call error."""
    config_flow_owm_client_mock.return_value = _create_mocked_owm_client(True)
    config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(True)
    config_flow_owm_client_mock.side_effect = RequestError("oops")
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}, data=CONFIG

@ -748,7 +748,7 @@ def test_rebuild_sqlite_states_table(recorder_db_url: str) -> None:
        session.add(States(state="on"))
        session.commit()

    migration.rebuild_sqlite_table(session_maker, engine, States)
    assert migration.rebuild_sqlite_table(session_maker, engine, States) is True

    with session_scope(session=session_maker()) as session:
        assert session.query(States).count() == 1
@ -776,13 +776,13 @@ def test_rebuild_sqlite_states_table_missing_fails(
        session.connection().execute(text("DROP TABLE states"))
        session.commit()

    migration.rebuild_sqlite_table(session_maker, engine, States)
    assert migration.rebuild_sqlite_table(session_maker, engine, States) is False
    assert "Error recreating SQLite table states" in caplog.text
    caplog.clear()

    # Now rebuild the events table to make sure the database did not
    # get corrupted
    migration.rebuild_sqlite_table(session_maker, engine, Events)
    assert migration.rebuild_sqlite_table(session_maker, engine, Events) is True

    with session_scope(session=session_maker()) as session:
        assert session.query(Events).count() == 1
@ -812,7 +812,7 @@ def test_rebuild_sqlite_states_table_extra_columns(
            text("ALTER TABLE states ADD COLUMN extra_column TEXT")
        )

    migration.rebuild_sqlite_table(session_maker, engine, States)
    assert migration.rebuild_sqlite_table(session_maker, engine, States) is True
    assert "Error recreating SQLite table states" not in caplog.text

    with session_scope(session=session_maker()) as session:
@ -905,7 +905,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None:
        for table, column in constraints_to_recreate
        for dropped_constraint in migration._drop_foreign_key_constraints(
            session_maker, engine, table, column
        )
        )[1]
    ]
    assert dropped_constraints_1 == expected_dropped_constraints[db_engine]

@ -917,7 +917,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None:
        for table, column in constraints_to_recreate
        for dropped_constraint in migration._drop_foreign_key_constraints(
            session_maker, engine, table, column
        )
        )[1]
    ]
    assert dropped_constraints_2 == []

@ -936,7 +936,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None:
        for table, column in constraints_to_recreate
        for dropped_constraint in migration._drop_foreign_key_constraints(
            session_maker, engine, table, column
        )
        )[1]
    ]
    assert dropped_constraints_3 == expected_dropped_constraints[db_engine]

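The rewritten assertions capture a new return contract: `rebuild_sqlite_table` now reports success as a boolean instead of only logging, and `_drop_foreign_key_constraints` returns a tuple whose second element lists the constraints it dropped. A sketch of the calling pattern this enables (the retry wrapper is illustrative, not the recorder's actual code):

```python
from homeassistant.components.recorder import migration


def rebuild_with_retry(session_maker, engine, table, attempts: int = 2) -> bool:
    """Illustrative retry wrapper around the now-boolean rebuild call."""
    for _ in range(attempts):
        if migration.rebuild_sqlite_table(session_maker, engine, table):
            return True  # rebuild finished; the migration can proceed
    return False  # still failing (e.g. disk full); leave the old table intact
```
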
@ -7,6 +7,7 @@ from unittest.mock import patch

import pytest
from sqlalchemy import create_engine, inspect
from sqlalchemy.exc import OperationalError, SQLAlchemyError
from sqlalchemy.orm import Session

from homeassistant.components import recorder
@ -444,3 +445,348 @@ async def test_migrate_can_resume_ix_states_event_id_removed(
    assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None

    await hass.async_stop()


@pytest.mark.usefixtures("skip_by_db_engine")
@pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
@pytest.mark.parametrize("enable_migrate_event_ids", [True])
@pytest.mark.parametrize("persistent_database", [True])
@pytest.mark.usefixtures("hass_storage")  # Prevent test hass from writing to storage
async def test_out_of_disk_space_while_rebuild_states_table(
    async_test_recorder: RecorderInstanceGenerator,
    caplog: pytest.LogCaptureFixture,
    recorder_db_url: str,
) -> None:
    """Test that we can recover from out of disk space while rebuilding the states table.

    This case tests the migration still happens if
    ix_states_event_id is removed from the states table.
    """
    importlib.import_module(SCHEMA_MODULE)
    old_db_schema = sys.modules[SCHEMA_MODULE]
    now = dt_util.utcnow()
    one_second_past = now - timedelta(seconds=1)
    mock_state = State(
        "sensor.test",
        "old",
        {"last_reset": now.isoformat()},
        last_changed=one_second_past,
        last_updated=now,
    )
    state_changed_event = Event(
        EVENT_STATE_CHANGED,
        {
            "entity_id": "sensor.test",
            "old_state": None,
            "new_state": mock_state,
        },
        EventOrigin.local,
        time_fired_timestamp=now.timestamp(),
    )
    custom_event = Event(
        "custom_event",
        {"entity_id": "sensor.custom"},
        EventOrigin.local,
        time_fired_timestamp=now.timestamp(),
    )
    number_of_migrations = 5

    def _get_event_id_foreign_keys():
        assert instance.engine is not None
        return next(
            (
                fk  # type: ignore[misc]
                for fk in inspect(instance.engine).get_foreign_keys("states")
                if fk["constrained_columns"] == ["event_id"]
            ),
            None,
        )

    def _get_states_index_names():
        with session_scope(hass=hass) as session:
            return inspect(session.connection()).get_indexes("states")

    with (
        patch.object(recorder, "db_schema", old_db_schema),
        patch.object(
            recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
        ),
        patch.object(core, "StatesMeta", old_db_schema.StatesMeta),
        patch.object(core, "EventTypes", old_db_schema.EventTypes),
        patch.object(core, "EventData", old_db_schema.EventData),
        patch.object(core, "States", old_db_schema.States),
        patch.object(core, "Events", old_db_schema.Events),
        patch(CREATE_ENGINE_TARGET, new=_create_engine_test),
        patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"),
        patch(
            "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids"
        ),
    ):
        async with (
            async_test_home_assistant() as hass,
            async_test_recorder(hass) as instance,
        ):
            await hass.async_block_till_done()
            await async_wait_recording_done(hass)
            await async_wait_recording_done(hass)

            def _add_data():
                with session_scope(hass=hass) as session:
                    session.add(old_db_schema.Events.from_event(custom_event))
                    session.add(old_db_schema.States.from_event(state_changed_event))

            await instance.async_add_executor_job(_add_data)
            await hass.async_block_till_done()
            await instance.async_block_till_done()

            await instance.async_add_executor_job(
                migration._drop_index,
                instance.get_session,
                "states",
                "ix_states_event_id",
            )

            states_indexes = await instance.async_add_executor_job(
                _get_states_index_names
            )
            states_index_names = {index["name"] for index in states_indexes}
            assert instance.use_legacy_events_index is True
            assert (
                await instance.async_add_executor_job(_get_event_id_foreign_keys)
                is not None
            )

            await hass.async_stop()
            await hass.async_block_till_done()

    assert "ix_states_entity_id_last_updated_ts" in states_index_names

    # Simulate out of disk space while rebuilding the states table by
    # - patching CreateTable to raise SQLAlchemyError for SQLite
    # - patching DropConstraint to raise OperationalError for MySQL and PostgreSQL
    with (
        patch(
            "homeassistant.components.recorder.migration.CreateTable",
            side_effect=SQLAlchemyError,
        ),
        patch(
            "homeassistant.components.recorder.migration.DropConstraint",
            side_effect=OperationalError(
                None, None, OSError("No space left on device")
            ),
        ),
    ):
        async with (
            async_test_home_assistant() as hass,
            async_test_recorder(hass) as instance,
        ):
            await hass.async_block_till_done()

            # We need to wait for all the migration tasks to complete
            # before we can check the database.
            for _ in range(number_of_migrations):
                await instance.async_block_till_done()
                await async_wait_recording_done(hass)

            states_indexes = await instance.async_add_executor_job(
                _get_states_index_names
            )
            states_index_names = {index["name"] for index in states_indexes}
            assert instance.use_legacy_events_index is True
            assert "Error recreating SQLite table states" in caplog.text
            assert await instance.async_add_executor_job(_get_event_id_foreign_keys)

            await hass.async_stop()

    # Now run it again to verify the table rebuild tries again
    caplog.clear()
    async with (
        async_test_home_assistant() as hass,
        async_test_recorder(hass) as instance,
    ):
        await hass.async_block_till_done()

        # We need to wait for all the migration tasks to complete
        # before we can check the database.
        for _ in range(number_of_migrations):
            await instance.async_block_till_done()
            await async_wait_recording_done(hass)

        states_indexes = await instance.async_add_executor_job(_get_states_index_names)
        states_index_names = {index["name"] for index in states_indexes}
        assert instance.use_legacy_events_index is False
        assert "ix_states_entity_id_last_updated_ts" not in states_index_names
        assert "ix_states_event_id" not in states_index_names
        assert "Rebuilding SQLite table states finished" in caplog.text
        assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None

        await hass.async_stop()


@pytest.mark.usefixtures("skip_by_db_engine")
@pytest.mark.skip_on_db_engine(["sqlite"])
@pytest.mark.parametrize("enable_migrate_event_ids", [True])
@pytest.mark.parametrize("persistent_database", [True])
@pytest.mark.usefixtures("hass_storage")  # Prevent test hass from writing to storage
async def test_out_of_disk_space_while_removing_foreign_key(
    async_test_recorder: RecorderInstanceGenerator,
    caplog: pytest.LogCaptureFixture,
    recorder_db_url: str,
) -> None:
    """Test that we can recover from out of disk space while removing the foreign key.

    This case tests the migration still happens if
    ix_states_event_id is removed from the states table.
    """
    importlib.import_module(SCHEMA_MODULE)
    old_db_schema = sys.modules[SCHEMA_MODULE]
    now = dt_util.utcnow()
    one_second_past = now - timedelta(seconds=1)
    mock_state = State(
        "sensor.test",
        "old",
        {"last_reset": now.isoformat()},
        last_changed=one_second_past,
        last_updated=now,
    )
    state_changed_event = Event(
        EVENT_STATE_CHANGED,
        {
            "entity_id": "sensor.test",
            "old_state": None,
            "new_state": mock_state,
        },
        EventOrigin.local,
        time_fired_timestamp=now.timestamp(),
    )
    custom_event = Event(
        "custom_event",
        {"entity_id": "sensor.custom"},
        EventOrigin.local,
        time_fired_timestamp=now.timestamp(),
    )
    number_of_migrations = 5

    def _get_event_id_foreign_keys():
        assert instance.engine is not None
        return next(
            (
                fk  # type: ignore[misc]
                for fk in inspect(instance.engine).get_foreign_keys("states")
                if fk["constrained_columns"] == ["event_id"]
            ),
            None,
        )

    def _get_states_index_names():
        with session_scope(hass=hass) as session:
            return inspect(session.connection()).get_indexes("states")

    with (
        patch.object(recorder, "db_schema", old_db_schema),
        patch.object(
            recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
        ),
        patch.object(core, "StatesMeta", old_db_schema.StatesMeta),
        patch.object(core, "EventTypes", old_db_schema.EventTypes),
        patch.object(core, "EventData", old_db_schema.EventData),
        patch.object(core, "States", old_db_schema.States),
        patch.object(core, "Events", old_db_schema.Events),
        patch(CREATE_ENGINE_TARGET, new=_create_engine_test),
        patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"),
        patch(
            "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids"
        ),
    ):
        async with (
            async_test_home_assistant() as hass,
            async_test_recorder(hass) as instance,
        ):
            await hass.async_block_till_done()
            await async_wait_recording_done(hass)
            await async_wait_recording_done(hass)

            def _add_data():
                with session_scope(hass=hass) as session:
                    session.add(old_db_schema.Events.from_event(custom_event))
                    session.add(old_db_schema.States.from_event(state_changed_event))

            await instance.async_add_executor_job(_add_data)
            await hass.async_block_till_done()
            await instance.async_block_till_done()

            await instance.async_add_executor_job(
                migration._drop_index,
                instance.get_session,
                "states",
                "ix_states_event_id",
            )

            states_indexes = await instance.async_add_executor_job(
                _get_states_index_names
            )
            states_index_names = {index["name"] for index in states_indexes}
            assert instance.use_legacy_events_index is True
            assert (
                await instance.async_add_executor_job(_get_event_id_foreign_keys)
                is not None
            )

            await hass.async_stop()
            await hass.async_block_till_done()

    assert "ix_states_entity_id_last_updated_ts" in states_index_names

    # Simulate out of disk space while removing the foreign key from the states table by
    # - patching DropConstraint to raise OperationalError for MySQL and PostgreSQL
    with (
        patch(
            "homeassistant.components.recorder.migration.DropConstraint",
            side_effect=OperationalError(
                None, None, OSError("No space left on device")
            ),
        ),
    ):
        async with (
            async_test_home_assistant() as hass,
            async_test_recorder(hass) as instance,
        ):
            await hass.async_block_till_done()

            # We need to wait for all the migration tasks to complete
            # before we can check the database.
            for _ in range(number_of_migrations):
                await instance.async_block_till_done()
                await async_wait_recording_done(hass)

            states_indexes = await instance.async_add_executor_job(
                _get_states_index_names
            )
            states_index_names = {index["name"] for index in states_indexes}
            assert instance.use_legacy_events_index is True
            assert await instance.async_add_executor_job(_get_event_id_foreign_keys)

            await hass.async_stop()

    # Now run it again to verify the table rebuild tries again
    caplog.clear()
    async with (
        async_test_home_assistant() as hass,
        async_test_recorder(hass) as instance,
    ):
        await hass.async_block_till_done()

        # We need to wait for all the migration tasks to complete
        # before we can check the database.
        for _ in range(number_of_migrations):
            await instance.async_block_till_done()
            await async_wait_recording_done(hass)

        states_indexes = await instance.async_add_executor_job(_get_states_index_names)
        states_index_names = {index["name"] for index in states_indexes}
        assert instance.use_legacy_events_index is False
        assert "ix_states_entity_id_last_updated_ts" not in states_index_names
        assert "ix_states_event_id" not in states_index_names
        assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None

        await hass.async_stop()

@ -3,6 +3,7 @@
import asyncio
import typing
from unittest.mock import AsyncMock, Mock, patch
import zoneinfo

import pytest
from zigpy.application import ControllerApplication
@ -16,7 +17,7 @@ from homeassistant.components.zha.const import (
    CONF_USB_PATH,
    DOMAIN,
)
from homeassistant.components.zha.helpers import get_zha_data
from homeassistant.components.zha.helpers import get_zha_data, get_zha_gateway
from homeassistant.const import (
    EVENT_HOMEASSISTANT_STOP,
    MAJOR_VERSION,
@ -288,3 +289,23 @@ async def test_shutdown_on_ha_stop(
    await hass.async_block_till_done()

    assert len(mock_shutdown.mock_calls) == 1


async def test_timezone_update(
    hass: HomeAssistant,
    config_entry: MockConfigEntry,
    mock_zigpy_connect: ControllerApplication,
) -> None:
    """Test that the ZHA gateway timezone is updated when HA timezone changes."""
    config_entry.add_to_hass(hass)

    await hass.config_entries.async_setup(config_entry.entry_id)
    gateway = get_zha_gateway(hass)

    assert hass.config.time_zone == "US/Pacific"
    assert gateway.config.local_timezone == zoneinfo.ZoneInfo("US/Pacific")

    await hass.config.async_update(time_zone="America/New_York")

    assert hass.config.time_zone == "America/New_York"
    assert gateway.config.local_timezone == zoneinfo.ZoneInfo("America/New_York")