Mirror of https://github.com/home-assistant/core.git (synced 2025-07-15 09:17:10 +00:00)
Update metoffice to use DataHub API (#131425)
* Update metoffice to use DataHub API
* Reauth test
* Updated to datapoint 0.11.0
* Less hacky check for day/night in twice-daily forecasts
* Updated to datapoint 0.12.1, added daily forecast
* Addressed review comments
* One more nit
* Validate credentials in reauth flow
* Addressed review comments
* Attempt to improve coverage
* Addressed comments
* Reverted unnecessary reordering
* Update homeassistant/components/metoffice/sensor.py
* Update tests/components/metoffice/test_sensor.py
* Update homeassistant/components/metoffice/sensor.py

Co-authored-by: Franck Nijhof <git@frenck.dev>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
Parent: cd9339903f
Commit: 4f24d63de1
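The change points the integration at the Met Office DataHub API through the datapoint library's Manager class (pinned at datapoint==0.12.1 in the manifest) instead of the retired DataPoint site lookup. Below is a minimal sketch of the calls the updated code relies on; the constructor, the get_forecast arguments, forecast.name, and the now() dictionary shape are taken from the diff, while the API key and coordinates are placeholders:

    # Sketch only, assuming datapoint==0.12.1 as pinned in the manifest.
    import datapoint.Manager

    connection = datapoint.Manager.Manager(api_key="YOUR-DATAHUB-API-KEY")  # placeholder key

    # Same arguments the integration passes: latitude, longitude, frequency, convert_weather_code.
    forecast = connection.get_forecast(53.38, -2.90, "daily", False)

    print(forecast.name)  # site name, used as the config entry title
    now = forecast.now()  # current timestep as a dict
    print(now["time"])    # timestamp stored in ATTR_LAST_UPDATE
    print(now.get("screenTemperature", {}).get("value"))  # values nest under {"value": ...}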
@@ -4,10 +4,10 @@ from __future__ import annotations

 import asyncio
 import logging
-import re
-from typing import Any

 import datapoint
+import datapoint.Forecast
+import datapoint.Manager

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
@@ -17,9 +17,8 @@ from homeassistant.const import (
     CONF_NAME,
     Platform,
 )
-from homeassistant.core import HomeAssistant, callback
-from homeassistant.exceptions import ConfigEntryNotReady
-from homeassistant.helpers import device_registry as dr, entity_registry as er
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.update_coordinator import TimestampDataUpdateCoordinator

@@ -30,11 +29,8 @@ from .const import (
     METOFFICE_DAILY_COORDINATOR,
     METOFFICE_HOURLY_COORDINATOR,
     METOFFICE_NAME,
-    MODE_3HOURLY,
-    MODE_DAILY,
 )
-from .data import MetOfficeData
-from .helpers import fetch_data, fetch_site
+from .helpers import fetch_data

 _LOGGER = logging.getLogger(__name__)

@@ -51,59 +47,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

     coordinates = f"{latitude}_{longitude}"

-    @callback
-    def update_unique_id(
-        entity_entry: er.RegistryEntry,
-    ) -> dict[str, Any] | None:
-        """Update unique ID of entity entry."""
-
-        if entity_entry.domain != Platform.SENSOR:
-            return None
-
-        name_to_key = {
-            "Station Name": "name",
-            "Weather": "weather",
-            "Temperature": "temperature",
-            "Feels Like Temperature": "feels_like_temperature",
-            "Wind Speed": "wind_speed",
-            "Wind Direction": "wind_direction",
-            "Wind Gust": "wind_gust",
-            "Visibility": "visibility",
-            "Visibility Distance": "visibility_distance",
-            "UV Index": "uv",
-            "Probability of Precipitation": "precipitation",
-            "Humidity": "humidity",
-        }
-
-        match = re.search(f"(?P<name>.*)_{coordinates}.*", entity_entry.unique_id)
-
-        if match is None:
-            return None
-
-        if (name := match.group("name")) in name_to_key:
-            return {
-                "new_unique_id": entity_entry.unique_id.replace(name, name_to_key[name])
-            }
-        return None
-
-    await er.async_migrate_entries(hass, entry.entry_id, update_unique_id)
-
-    connection = datapoint.connection(api_key=api_key)
-
-    site = await hass.async_add_executor_job(
-        fetch_site, connection, latitude, longitude
-    )
-    if site is None:
-        raise ConfigEntryNotReady
-
-    async def async_update_3hourly() -> MetOfficeData:
+    connection = datapoint.Manager.Manager(api_key=api_key)
+
+    async def async_update_hourly() -> datapoint.Forecast:
         return await hass.async_add_executor_job(
-            fetch_data, connection, site, MODE_3HOURLY
+            fetch_data, connection, latitude, longitude, "hourly"
         )

-    async def async_update_daily() -> MetOfficeData:
+    async def async_update_daily() -> datapoint.Forecast:
         return await hass.async_add_executor_job(
-            fetch_data, connection, site, MODE_DAILY
+            fetch_data, connection, latitude, longitude, "daily"
         )

     metoffice_hourly_coordinator = TimestampDataUpdateCoordinator(
@@ -111,7 +64,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         _LOGGER,
         config_entry=entry,
         name=f"MetOffice Hourly Coordinator for {site_name}",
-        update_method=async_update_3hourly,
+        update_method=async_update_hourly,
         update_interval=DEFAULT_SCAN_INTERVAL,
     )

@@ -2,10 +2,14 @@

 from __future__ import annotations

+from collections.abc import Mapping
 import logging
 from typing import Any

 import datapoint
+from datapoint.exceptions import APIException
+import datapoint.Manager
+from requests import HTTPError
 import voluptuous as vol

 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
@@ -15,30 +19,41 @@ from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import config_validation as cv

 from .const import DOMAIN
-from .helpers import fetch_site

 _LOGGER = logging.getLogger(__name__)


-async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
+async def validate_input(
+    hass: HomeAssistant, latitude: float, longitude: float, api_key: str
+) -> dict[str, Any]:
     """Validate that the user input allows us to connect to DataPoint.

     Data has the keys from DATA_SCHEMA with values provided by the user.
     """
-    latitude = data[CONF_LATITUDE]
-    longitude = data[CONF_LONGITUDE]
-    api_key = data[CONF_API_KEY]
+    errors = {}
+    connection = datapoint.Manager.Manager(api_key=api_key)

-    connection = datapoint.connection(api_key=api_key)
+    try:
+        forecast = await hass.async_add_executor_job(
+            connection.get_forecast,
+            latitude,
+            longitude,
+            "daily",
+            False,
+        )

-    site = await hass.async_add_executor_job(
-        fetch_site, connection, latitude, longitude
-    )
+    except (HTTPError, APIException) as err:
+        if isinstance(err, HTTPError) and err.response.status_code == 401:
+            errors["base"] = "invalid_auth"
+        else:
+            errors["base"] = "cannot_connect"
+    except Exception:
+        _LOGGER.exception("Unexpected exception")
+        errors["base"] = "unknown"
+    else:
+        return {"site_name": forecast.name, "errors": errors}

-    if site is None:
-        raise CannotConnect
-
-    return {"site_name": site.name}
+    return {"errors": errors}


 class MetOfficeConfigFlow(ConfigFlow, domain=DOMAIN):
@@ -57,15 +72,17 @@ class MetOfficeConfigFlow(ConfigFlow, domain=DOMAIN):
             )
             self._abort_if_unique_id_configured()

-            try:
-                info = await validate_input(self.hass, user_input)
-            except CannotConnect:
-                errors["base"] = "cannot_connect"
-            except Exception:
-                _LOGGER.exception("Unexpected exception")
-                errors["base"] = "unknown"
-            else:
-                user_input[CONF_NAME] = info["site_name"]
+            result = await validate_input(
+                self.hass,
+                latitude=user_input[CONF_LATITUDE],
+                longitude=user_input[CONF_LONGITUDE],
+                api_key=user_input[CONF_API_KEY],
+            )
+
+            errors = result["errors"]
+
+            if not errors:
+                user_input[CONF_NAME] = result["site_name"]
                 return self.async_create_entry(
                     title=user_input[CONF_NAME], data=user_input
                 )
@@ -83,7 +100,51 @@ class MetOfficeConfigFlow(ConfigFlow, domain=DOMAIN):
         )

         return self.async_show_form(
-            step_id="user", data_schema=data_schema, errors=errors
+            step_id="user",
+            data_schema=data_schema,
+            errors=errors,
+        )
+
+    async def async_step_reauth(
+        self, entry_data: Mapping[str, Any]
+    ) -> ConfigFlowResult:
+        """Perform reauth upon an API authentication error."""
+        return await self.async_step_reauth_confirm()
+
+    async def async_step_reauth_confirm(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Dialog that informs the user that reauth is required."""
+        errors = {}
+
+        entry = self._get_reauth_entry()
+        if user_input is not None:
+            result = await validate_input(
+                self.hass,
+                latitude=entry.data[CONF_LATITUDE],
+                longitude=entry.data[CONF_LONGITUDE],
+                api_key=user_input[CONF_API_KEY],
+            )
+
+            errors = result["errors"]
+
+            if not errors:
+                return self.async_update_reload_and_abort(
+                    self._get_reauth_entry(),
+                    data_updates=user_input,
+                )
+
+        return self.async_show_form(
+            step_id="reauth_confirm",
+            data_schema=vol.Schema(
+                {
+                    vol.Required(CONF_API_KEY): str,
+                }
+            ),
+            description_placeholders={
+                "docs_url": ("https://www.home-assistant.io/integrations/metoffice")
+            },
+            errors=errors,
         )

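The rewritten validate_input returns an errors dict that both the user step and the new reauth step reuse, mapping a 401 response to invalid_auth and any other HTTP or API failure to cannot_connect. A condensed, hedged sketch of just that mapping (a standalone helper, not the integration's exact function):

    # Condensed sketch of the error mapping performed inside validate_input.
    from datapoint.exceptions import APIException
    from requests import HTTPError


    def map_fetch_error(err: Exception) -> str:
        """Translate a forecast-fetch failure into a config-flow error key."""
        if isinstance(err, HTTPError) and err.response.status_code == 401:
            return "invalid_auth"  # surfaced via the new strings.json entry
        if isinstance(err, (HTTPError, APIException)):
            return "cannot_connect"
        return "unknown"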
@@ -18,6 +18,17 @@ from homeassistant.components.weather import (
     ATTR_CONDITION_SUNNY,
     ATTR_CONDITION_WINDY,
     ATTR_CONDITION_WINDY_VARIANT,
+    ATTR_FORECAST_CONDITION,
+    ATTR_FORECAST_NATIVE_APPARENT_TEMP,
+    ATTR_FORECAST_NATIVE_PRESSURE,
+    ATTR_FORECAST_NATIVE_TEMP,
+    ATTR_FORECAST_NATIVE_TEMP_LOW,
+    ATTR_FORECAST_NATIVE_WIND_GUST_SPEED,
+    ATTR_FORECAST_NATIVE_WIND_SPEED,
+    ATTR_FORECAST_PRECIPITATION,
+    ATTR_FORECAST_PRECIPITATION_PROBABILITY,
+    ATTR_FORECAST_UV_INDEX,
+    ATTR_FORECAST_WIND_BEARING,
 )

 DOMAIN = "metoffice"
@@ -33,22 +44,19 @@ METOFFICE_DAILY_COORDINATOR = "metoffice_daily_coordinator"
 METOFFICE_MONITORED_CONDITIONS = "metoffice_monitored_conditions"
 METOFFICE_NAME = "metoffice_name"

-MODE_3HOURLY = "3hourly"
-MODE_DAILY = "daily"
-
-CONDITION_CLASSES: dict[str, list[str]] = {
-    ATTR_CONDITION_CLEAR_NIGHT: ["0"],
-    ATTR_CONDITION_CLOUDY: ["7", "8"],
-    ATTR_CONDITION_FOG: ["5", "6"],
-    ATTR_CONDITION_HAIL: ["19", "20", "21"],
-    ATTR_CONDITION_LIGHTNING: ["30"],
-    ATTR_CONDITION_LIGHTNING_RAINY: ["28", "29"],
-    ATTR_CONDITION_PARTLYCLOUDY: ["2", "3"],
-    ATTR_CONDITION_POURING: ["13", "14", "15"],
-    ATTR_CONDITION_RAINY: ["9", "10", "11", "12"],
-    ATTR_CONDITION_SNOWY: ["22", "23", "24", "25", "26", "27"],
-    ATTR_CONDITION_SNOWY_RAINY: ["16", "17", "18"],
-    ATTR_CONDITION_SUNNY: ["1"],
+CONDITION_CLASSES: dict[str, list[int]] = {
+    ATTR_CONDITION_CLEAR_NIGHT: [0],
+    ATTR_CONDITION_CLOUDY: [7, 8],
+    ATTR_CONDITION_FOG: [5, 6],
+    ATTR_CONDITION_HAIL: [19, 20, 21],
+    ATTR_CONDITION_LIGHTNING: [30],
+    ATTR_CONDITION_LIGHTNING_RAINY: [28, 29],
+    ATTR_CONDITION_PARTLYCLOUDY: [2, 3],
+    ATTR_CONDITION_POURING: [13, 14, 15],
+    ATTR_CONDITION_RAINY: [9, 10, 11, 12],
+    ATTR_CONDITION_SNOWY: [22, 23, 24, 25, 26, 27],
+    ATTR_CONDITION_SNOWY_RAINY: [16, 17, 18],
+    ATTR_CONDITION_SUNNY: [1],
     ATTR_CONDITION_WINDY: [],
     ATTR_CONDITION_WINDY_VARIANT: [],
     ATTR_CONDITION_EXCEPTIONAL: [],
@@ -59,20 +67,28 @@ CONDITION_MAP = {
     for cond_code in cond_codes
 }

-VISIBILITY_CLASSES = {
-    "VP": "Very Poor",
-    "PO": "Poor",
-    "MO": "Moderate",
-    "GO": "Good",
-    "VG": "Very Good",
-    "EX": "Excellent",
+HOURLY_FORECAST_ATTRIBUTE_MAP: dict[str, str] = {
+    ATTR_FORECAST_CONDITION: "significantWeatherCode",
+    ATTR_FORECAST_NATIVE_APPARENT_TEMP: "feelsLikeTemperature",
+    ATTR_FORECAST_NATIVE_PRESSURE: "mslp",
+    ATTR_FORECAST_NATIVE_TEMP: "screenTemperature",
+    ATTR_FORECAST_PRECIPITATION: "totalPrecipAmount",
+    ATTR_FORECAST_PRECIPITATION_PROBABILITY: "probOfPrecipitation",
+    ATTR_FORECAST_UV_INDEX: "uvIndex",
+    ATTR_FORECAST_WIND_BEARING: "windDirectionFrom10m",
+    ATTR_FORECAST_NATIVE_WIND_SPEED: "windSpeed10m",
+    ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: "windGustSpeed10m",
 }

-VISIBILITY_DISTANCE_CLASSES = {
-    "VP": "<1",
-    "PO": "1-4",
-    "MO": "4-10",
-    "GO": "10-20",
-    "VG": "20-40",
-    "EX": ">40",
+DAILY_FORECAST_ATTRIBUTE_MAP: dict[str, str] = {
+    ATTR_FORECAST_CONDITION: "daySignificantWeatherCode",
+    ATTR_FORECAST_NATIVE_APPARENT_TEMP: "dayMaxFeelsLikeTemp",
+    ATTR_FORECAST_NATIVE_PRESSURE: "middayMslp",
+    ATTR_FORECAST_NATIVE_TEMP: "dayMaxScreenTemperature",
+    ATTR_FORECAST_NATIVE_TEMP_LOW: "nightMinScreenTemperature",
+    ATTR_FORECAST_PRECIPITATION_PROBABILITY: "dayProbabilityOfPrecipitation",
+    ATTR_FORECAST_UV_INDEX: "maxUvIndex",
+    ATTR_FORECAST_WIND_BEARING: "midday10MWindDirection",
+    ATTR_FORECAST_NATIVE_WIND_SPEED: "midday10MWindSpeed",
+    ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: "midday10MWindGust",
 }
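The HOURLY_FORECAST_ATTRIBUTE_MAP and DAILY_FORECAST_ATTRIBUTE_MAP tables replace the old visibility lookup dictionaries: each Home Assistant forecast attribute is keyed to the DataHub field name for that cadence, and CONDITION_CLASSES now uses integer weather codes. A small illustrative helper showing how such a map is applied to one raw timestep dict (the real translation lives in the weather platform's _populate_forecast_data):

    # Illustrative only: applies an attribute map to a raw DataHub timestep dict.
    from typing import Any


    def translate_timestep(timestep: dict[str, Any], mapping: dict[str, str]) -> dict[str, Any]:
        """Map DataHub field names onto Home Assistant forecast attribute names."""
        return {
            ha_attr: timestep.get(api_field, {}).get("value")
            for ha_attr, api_field in mapping.items()
        }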
@@ -1,18 +0,0 @@
-"""Common Met Office Data class used by both sensor and entity."""
-
-from __future__ import annotations
-
-from dataclasses import dataclass
-
-from datapoint.Forecast import Forecast
-from datapoint.Site import Site
-from datapoint.Timestep import Timestep
-
-
-@dataclass
-class MetOfficeData:
-    """Data structure for MetOffice weather and forecast."""
-
-    now: Forecast
-    forecast: list[Timestep]
-    site: Site
@@ -3,51 +3,40 @@
 from __future__ import annotations

 import logging
+from typing import Any, Literal

 import datapoint
-from datapoint.Site import Site
+from datapoint.Forecast import Forecast
+from requests import HTTPError

+from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers.update_coordinator import UpdateFailed
-from homeassistant.util.dt import utcnow
-
-from .const import MODE_3HOURLY
-from .data import MetOfficeData

 _LOGGER = logging.getLogger(__name__)


-def fetch_site(
-    connection: datapoint.Manager, latitude: float, longitude: float
-) -> Site | None:
-    """Fetch site information from Datapoint API."""
-    try:
-        return connection.get_nearest_forecast_site(
-            latitude=latitude, longitude=longitude
-        )
-    except datapoint.exceptions.APIException as err:
-        _LOGGER.error("Received error from Met Office Datapoint: %s", err)
-        return None
-
-
-def fetch_data(connection: datapoint.Manager, site: Site, mode: str) -> MetOfficeData:
+def fetch_data(
+    connection: datapoint.Manager,
+    latitude: float,
+    longitude: float,
+    frequency: Literal["daily", "twice-daily", "hourly"],
+) -> Forecast:
     """Fetch weather and forecast from Datapoint API."""
     try:
-        forecast = connection.get_forecast_for_site(site.location_id, mode)
+        return connection.get_forecast(
+            latitude, longitude, frequency, convert_weather_code=False
+        )
     except (ValueError, datapoint.exceptions.APIException) as err:
         _LOGGER.error("Check Met Office connection: %s", err.args)
         raise UpdateFailed from err
+    except HTTPError as err:
+        if err.response.status_code == 401:
+            raise ConfigEntryAuthFailed from err
+        raise

-    time_now = utcnow()
-    return MetOfficeData(
-        now=forecast.now(),
-        forecast=[
-            timestep
-            for day in forecast.days
-            for timestep in day.timesteps
-            if timestep.date > time_now
-            and (
-                mode == MODE_3HOURLY or timestep.date.hour > 6
-            )  # ensures only one result per day in MODE_DAILY
-        ],
-        site=site,
-    )
+
+def get_attribute(data: dict[str, Any] | None, attr_name: str) -> Any | None:
+    """Get an attribute from weather data."""
+    if data:
+        return data.get(attr_name, {}).get("value")
+    return None
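fetch_data now returns the library's Forecast object for a latitude, longitude, and frequency, converting a 401 into ConfigEntryAuthFailed so Home Assistant starts the new reauth flow, and get_attribute is the single place that unwraps the nested {"value": ...} payload. A hedged usage sketch (the coordinates are placeholders, and in the integration fetch_data runs inside an executor job):

    # Usage sketch for the new helpers; values are placeholders.
    forecast = fetch_data(connection, 53.38, -2.90, "hourly")
    current = forecast.now()
    temperature = get_attribute(current, "screenTemperature")  # float value or None
    missing = get_attribute(current, "notARealField")          # None, no KeyError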
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/metoffice",
   "iot_class": "cloud_polling",
   "loggers": ["datapoint"],
-  "requirements": ["datapoint==0.9.9"]
+  "requirements": ["datapoint==0.12.1"]
 }
@@ -2,11 +2,13 @@

 from __future__ import annotations

+from dataclasses import dataclass
 from typing import Any

-from datapoint.Element import Element
+from datapoint.Forecast import Forecast

 from homeassistant.components.sensor import (
+    DOMAIN as SENSOR_DOMAIN,
     SensorDeviceClass,
     SensorEntity,
     SensorEntityDescription,
@@ -20,6 +22,7 @@ from homeassistant.const import (
     UnitOfTemperature,
 )
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.typing import StateType
 from homeassistant.helpers.update_coordinator import (
@@ -33,105 +36,110 @@ from .const import (
     CONDITION_MAP,
     DOMAIN,
     METOFFICE_COORDINATES,
-    METOFFICE_DAILY_COORDINATOR,
     METOFFICE_HOURLY_COORDINATOR,
     METOFFICE_NAME,
-    MODE_DAILY,
-    VISIBILITY_CLASSES,
-    VISIBILITY_DISTANCE_CLASSES,
 )
-from .data import MetOfficeData
+from .helpers import get_attribute

 ATTR_LAST_UPDATE = "last_update"
-ATTR_SENSOR_ID = "sensor_id"
-ATTR_SITE_ID = "site_id"
-ATTR_SITE_NAME = "site_name"


-SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
-    SensorEntityDescription(
+@dataclass(frozen=True, kw_only=True)
+class MetOfficeSensorEntityDescription(SensorEntityDescription):
+    """Entity description class for MetOffice sensors."""
+
+    native_attr_name: str
+
+
+SENSOR_TYPES: tuple[MetOfficeSensorEntityDescription, ...] = (
+    MetOfficeSensorEntityDescription(
         key="name",
+        native_attr_name="name",
         name="Station name",
         icon="mdi:label-outline",
         entity_registry_enabled_default=False,
     ),
-    SensorEntityDescription(
+    MetOfficeSensorEntityDescription(
         key="weather",
+        native_attr_name="significantWeatherCode",
         name="Weather",
         icon="mdi:weather-sunny",  # but will adapt to current conditions
         entity_registry_enabled_default=True,
     ),
-    SensorEntityDescription(
+    MetOfficeSensorEntityDescription(
         key="temperature",
+        native_attr_name="screenTemperature",
         name="Temperature",
         device_class=SensorDeviceClass.TEMPERATURE,
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
         icon=None,
         entity_registry_enabled_default=True,
     ),
-    SensorEntityDescription(
+    MetOfficeSensorEntityDescription(
         key="feels_like_temperature",
+        native_attr_name="feelsLikeTemperature",
         name="Feels like temperature",
         device_class=SensorDeviceClass.TEMPERATURE,
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
         icon=None,
         entity_registry_enabled_default=False,
     ),
-    SensorEntityDescription(
+    MetOfficeSensorEntityDescription(
         key="wind_speed",
+        native_attr_name="windSpeed10m",
         name="Wind speed",
-        native_unit_of_measurement=UnitOfSpeed.MILES_PER_HOUR,
+        native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND,
         # Hint mph because that's the preferred unit for wind speeds in UK
         # This can be removed if we add a mixed metric/imperial unit system for UK users
         suggested_unit_of_measurement=UnitOfSpeed.MILES_PER_HOUR,
         device_class=SensorDeviceClass.WIND_SPEED,
         entity_registry_enabled_default=True,
     ),
-    SensorEntityDescription(
+    MetOfficeSensorEntityDescription(
         key="wind_direction",
+        native_attr_name="windDirectionFrom10m",
         name="Wind direction",
         icon="mdi:compass-outline",
         entity_registry_enabled_default=False,
     ),
-    SensorEntityDescription(
+    MetOfficeSensorEntityDescription(
         key="wind_gust",
+        native_attr_name="windGustSpeed10m",
         name="Wind gust",
-        native_unit_of_measurement=UnitOfSpeed.MILES_PER_HOUR,
+        native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND,
         # Hint mph because that's the preferred unit for wind speeds in UK
         # This can be removed if we add a mixed metric/imperial unit system for UK users
         suggested_unit_of_measurement=UnitOfSpeed.MILES_PER_HOUR,
         device_class=SensorDeviceClass.WIND_SPEED,
         entity_registry_enabled_default=False,
     ),
-    SensorEntityDescription(
+    MetOfficeSensorEntityDescription(
         key="visibility",
-        name="Visibility",
-        icon="mdi:eye",
-        entity_registry_enabled_default=False,
-    ),
-    SensorEntityDescription(
-        key="visibility_distance",
+        native_attr_name="visibility",
         name="Visibility distance",
-        native_unit_of_measurement=UnitOfLength.KILOMETERS,
+        native_unit_of_measurement=UnitOfLength.METERS,
         icon="mdi:eye",
         entity_registry_enabled_default=False,
     ),
-    SensorEntityDescription(
+    MetOfficeSensorEntityDescription(
         key="uv",
+        native_attr_name="uvIndex",
         name="UV index",
         native_unit_of_measurement=UV_INDEX,
         icon="mdi:weather-sunny-alert",
         entity_registry_enabled_default=True,
     ),
-    SensorEntityDescription(
+    MetOfficeSensorEntityDescription(
         key="precipitation",
+        native_attr_name="probOfPrecipitation",
         name="Probability of precipitation",
         native_unit_of_measurement=PERCENTAGE,
         icon="mdi:weather-rainy",
         entity_registry_enabled_default=True,
     ),
-    SensorEntityDescription(
+    MetOfficeSensorEntityDescription(
         key="humidity",
+        native_attr_name="screenRelativeHumidity",
         name="Humidity",
         device_class=SensorDeviceClass.HUMIDITY,
         native_unit_of_measurement=PERCENTAGE,
@@ -147,23 +155,37 @@ async def async_setup_entry(
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up the Met Office weather sensor platform."""
+    entity_registry = er.async_get(hass)
     hass_data = hass.data[DOMAIN][entry.entry_id]

+    # Remove daily entities from legacy config entries
+    for description in SENSOR_TYPES:
+        if entity_id := entity_registry.async_get_entity_id(
+            SENSOR_DOMAIN,
+            DOMAIN,
+            f"{description.key}_{hass_data[METOFFICE_COORDINATES]}_daily",
+        ):
+            entity_registry.async_remove(entity_id)
+
+    # Remove old visibility sensors
+    if entity_id := entity_registry.async_get_entity_id(
+        SENSOR_DOMAIN,
+        DOMAIN,
+        f"visibility_distance_{hass_data[METOFFICE_COORDINATES]}_daily",
+    ):
+        entity_registry.async_remove(entity_id)
+    if entity_id := entity_registry.async_get_entity_id(
+        SENSOR_DOMAIN,
+        DOMAIN,
+        f"visibility_distance_{hass_data[METOFFICE_COORDINATES]}",
+    ):
+        entity_registry.async_remove(entity_id)
+
     async_add_entities(
         [
             MetOfficeCurrentSensor(
                 hass_data[METOFFICE_HOURLY_COORDINATOR],
                 hass_data,
-                True,
-                description,
-            )
-            for description in SENSOR_TYPES
-        ]
-        + [
-            MetOfficeCurrentSensor(
-                hass_data[METOFFICE_DAILY_COORDINATOR],
-                hass_data,
-                False,
                 description,
             )
             for description in SENSOR_TYPES
@@ -173,64 +195,43 @@ async def async_setup_entry(


 class MetOfficeCurrentSensor(
-    CoordinatorEntity[DataUpdateCoordinator[MetOfficeData]], SensorEntity
+    CoordinatorEntity[DataUpdateCoordinator[Forecast]], SensorEntity
 ):
     """Implementation of a Met Office current weather condition sensor."""

     _attr_attribution = ATTRIBUTION
     _attr_has_entity_name = True

+    entity_description: MetOfficeSensorEntityDescription
+
     def __init__(
         self,
-        coordinator: DataUpdateCoordinator[MetOfficeData],
+        coordinator: DataUpdateCoordinator[Forecast],
         hass_data: dict[str, Any],
-        use_3hourly: bool,
-        description: SensorEntityDescription,
+        description: MetOfficeSensorEntityDescription,
     ) -> None:
         """Initialize the sensor."""
         super().__init__(coordinator)

         self.entity_description = description
-        mode_label = "3-hourly" if use_3hourly else "daily"

         self._attr_device_info = get_device_info(
             coordinates=hass_data[METOFFICE_COORDINATES], name=hass_data[METOFFICE_NAME]
         )
-        self._attr_name = f"{description.name} {mode_label}"
         self._attr_unique_id = f"{description.key}_{hass_data[METOFFICE_COORDINATES]}"
-        if not use_3hourly:
-            self._attr_unique_id = f"{self._attr_unique_id}_{MODE_DAILY}"
-        self._attr_entity_registry_enabled_default = (
-            self.entity_description.entity_registry_enabled_default and use_3hourly
-        )

     @property
     def native_value(self) -> StateType:
         """Return the state of the sensor."""
-        value = None
-
-        if self.entity_description.key == "visibility_distance" and hasattr(
-            self.coordinator.data.now, "visibility"
-        ):
-            value = VISIBILITY_DISTANCE_CLASSES.get(
-                self.coordinator.data.now.visibility.value
-            )
-
-        if self.entity_description.key == "visibility" and hasattr(
-            self.coordinator.data.now, "visibility"
-        ):
-            value = VISIBILITY_CLASSES.get(self.coordinator.data.now.visibility.value)
-
-        elif self.entity_description.key == "weather" and hasattr(
-            self.coordinator.data.now, self.entity_description.key
-        ):
-            value = CONDITION_MAP.get(self.coordinator.data.now.weather.value)
-
-        elif hasattr(self.coordinator.data.now, self.entity_description.key):
-            value = getattr(self.coordinator.data.now, self.entity_description.key)
-
-        if isinstance(value, Element):
-            value = value.value
+        value = get_attribute(
+            self.coordinator.data.now(), self.entity_description.native_attr_name
+        )
+
+        if (
+            self.entity_description.native_attr_name == "significantWeatherCode"
+            and value
+        ):
+            value = CONDITION_MAP.get(value)

         return value

@@ -238,7 +239,7 @@ class MetOfficeCurrentSensor(
     def icon(self) -> str | None:
         """Return the icon for the entity card."""
         value = self.entity_description.icon
-        if self.entity_description.key == "weather":
+        if self.entity_description.native_attr_name == "significantWeatherCode":
             value = self.state
             if value is None:
                 value = "sunny"
@@ -252,8 +253,5 @@ class MetOfficeCurrentSensor(
     def extra_state_attributes(self) -> dict[str, Any]:
         """Return the state attributes of the device."""
         return {
-            ATTR_LAST_UPDATE: self.coordinator.data.now.date,
-            ATTR_SENSOR_ID: self.entity_description.key,
-            ATTR_SITE_ID: self.coordinator.data.site.location_id,
-            ATTR_SITE_NAME: self.coordinator.data.site.name,
+            ATTR_LAST_UPDATE: self.coordinator.data.now()["time"],
         }
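Each sensor description now names the DataHub field it reads (native_attr_name), so native_value is a single get_attribute lookup plus the weather-code mapping instead of the old per-key hasattr chain. A minimal sketch of that resolution path outside the entity class (illustrative only):

    # Illustrative sketch of how a sensor state is resolved from coordinator data.
    def resolve_sensor_value(forecast, description):
        value = get_attribute(forecast.now(), description.native_attr_name)
        if description.native_attr_name == "significantWeatherCode" and value:
            value = CONDITION_MAP.get(value)  # integer weather code -> HA condition
        return value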
@@ -2,21 +2,29 @@
   "config": {
     "step": {
       "user": {
-        "description": "The latitude and longitude will be used to find the closest weather station.",
         "title": "Connect to the UK Met Office",
         "data": {
          "api_key": "[%key:common::config_flow::data::api_key%]",
          "latitude": "[%key:common::config_flow::data::latitude%]",
          "longitude": "[%key:common::config_flow::data::longitude%]"
         }
+      },
+      "reauth_confirm": {
+        "title": "Reauthenticate with DataHub API",
+        "description": "Please re-enter you DataHub API key. If you are still using an old Datapoint API key, you need to sign up for DataHub API now, see [documentation]({docs_url}) for details.",
+        "data": {
+          "api_key": "[%key:common::config_flow::data::api_key%]"
+        }
       }
     },
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
+      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     }
   }
 }
@@ -2,15 +2,22 @@

 from __future__ import annotations

+from datetime import datetime
 from typing import Any, cast

-from datapoint.Timestep import Timestep
+from datapoint.Forecast import Forecast as ForecastData

 from homeassistant.components.weather import (
     ATTR_FORECAST_CONDITION,
+    ATTR_FORECAST_NATIVE_APPARENT_TEMP,
+    ATTR_FORECAST_NATIVE_PRESSURE,
     ATTR_FORECAST_NATIVE_TEMP,
+    ATTR_FORECAST_NATIVE_TEMP_LOW,
+    ATTR_FORECAST_NATIVE_WIND_GUST_SPEED,
     ATTR_FORECAST_NATIVE_WIND_SPEED,
+    ATTR_FORECAST_PRECIPITATION,
     ATTR_FORECAST_PRECIPITATION_PROBABILITY,
+    ATTR_FORECAST_UV_INDEX,
     ATTR_FORECAST_WIND_BEARING,
     DOMAIN as WEATHER_DOMAIN,
     CoordinatorWeatherEntity,
@@ -18,7 +25,12 @@ from homeassistant.components.weather import (
     WeatherEntityFeature,
 )
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import UnitOfPressure, UnitOfSpeed, UnitOfTemperature
+from homeassistant.const import (
+    UnitOfLength,
+    UnitOfPressure,
+    UnitOfSpeed,
+    UnitOfTemperature,
+)
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -28,14 +40,15 @@ from . import get_device_info
 from .const import (
     ATTRIBUTION,
     CONDITION_MAP,
+    DAILY_FORECAST_ATTRIBUTE_MAP,
     DOMAIN,
+    HOURLY_FORECAST_ATTRIBUTE_MAP,
     METOFFICE_COORDINATES,
     METOFFICE_DAILY_COORDINATOR,
     METOFFICE_HOURLY_COORDINATOR,
     METOFFICE_NAME,
-    MODE_DAILY,
 )
-from .data import MetOfficeData
+from .helpers import get_attribute


 async def async_setup_entry(
@@ -47,11 +60,11 @@ async def async_setup_entry(
     entity_registry = er.async_get(hass)
     hass_data = hass.data[DOMAIN][entry.entry_id]

-    # Remove hourly entity from legacy config entries
+    # Remove daily entity from legacy config entries
     if entity_id := entity_registry.async_get_entity_id(
         WEATHER_DOMAIN,
         DOMAIN,
-        _calculate_unique_id(hass_data[METOFFICE_COORDINATES], True),
+        f"{hass_data[METOFFICE_COORDINATES]}_daily",
     ):
         entity_registry.async_remove(entity_id)

@@ -67,54 +80,89 @@ async def async_setup_entry(
     )


-def _build_forecast_data(timestep: Timestep) -> Forecast:
-    data = Forecast(datetime=timestep.date.isoformat())
-    if timestep.weather:
-        data[ATTR_FORECAST_CONDITION] = CONDITION_MAP.get(timestep.weather.value)
-    if timestep.precipitation:
-        data[ATTR_FORECAST_PRECIPITATION_PROBABILITY] = timestep.precipitation.value
-    if timestep.temperature:
-        data[ATTR_FORECAST_NATIVE_TEMP] = timestep.temperature.value
-    if timestep.wind_direction:
-        data[ATTR_FORECAST_WIND_BEARING] = timestep.wind_direction.value
-    if timestep.wind_speed:
-        data[ATTR_FORECAST_NATIVE_WIND_SPEED] = timestep.wind_speed.value
+def _build_hourly_forecast_data(timestep: dict[str, Any]) -> Forecast:
+    data = Forecast(datetime=timestep["time"].isoformat())
+    _populate_forecast_data(data, timestep, HOURLY_FORECAST_ATTRIBUTE_MAP)
     return data


-def _calculate_unique_id(coordinates: str, use_3hourly: bool) -> str:
-    """Calculate unique ID."""
-    if use_3hourly:
-        return coordinates
-    return f"{coordinates}_{MODE_DAILY}"
+def _build_daily_forecast_data(timestep: dict[str, Any]) -> Forecast:
+    data = Forecast(datetime=timestep["time"].isoformat())
+    _populate_forecast_data(data, timestep, DAILY_FORECAST_ATTRIBUTE_MAP)
+    return data
+
+
+def _populate_forecast_data(
+    forecast: Forecast, timestep: dict[str, Any], mapping: dict[str, str]
+) -> None:
+    def get_mapped_attribute(attr: str) -> Any:
+        if attr not in mapping:
+            return None
+        return get_attribute(timestep, mapping[attr])
+
+    weather_code = get_mapped_attribute(ATTR_FORECAST_CONDITION)
+    if weather_code is not None:
+        forecast[ATTR_FORECAST_CONDITION] = CONDITION_MAP.get(weather_code)
+    forecast[ATTR_FORECAST_NATIVE_APPARENT_TEMP] = get_mapped_attribute(
+        ATTR_FORECAST_NATIVE_APPARENT_TEMP
+    )
+    forecast[ATTR_FORECAST_NATIVE_PRESSURE] = get_mapped_attribute(
+        ATTR_FORECAST_NATIVE_PRESSURE
+    )
+    forecast[ATTR_FORECAST_NATIVE_TEMP] = get_mapped_attribute(
+        ATTR_FORECAST_NATIVE_TEMP
+    )
+    forecast[ATTR_FORECAST_NATIVE_TEMP_LOW] = get_mapped_attribute(
+        ATTR_FORECAST_NATIVE_TEMP_LOW
+    )
+    forecast[ATTR_FORECAST_PRECIPITATION] = get_mapped_attribute(
+        ATTR_FORECAST_PRECIPITATION
+    )
+    forecast[ATTR_FORECAST_PRECIPITATION_PROBABILITY] = get_mapped_attribute(
+        ATTR_FORECAST_PRECIPITATION_PROBABILITY
+    )
+    forecast[ATTR_FORECAST_UV_INDEX] = get_mapped_attribute(ATTR_FORECAST_UV_INDEX)
+    forecast[ATTR_FORECAST_WIND_BEARING] = get_mapped_attribute(
+        ATTR_FORECAST_WIND_BEARING
+    )
+    forecast[ATTR_FORECAST_NATIVE_WIND_SPEED] = get_mapped_attribute(
+        ATTR_FORECAST_NATIVE_WIND_SPEED
+    )
+    forecast[ATTR_FORECAST_NATIVE_WIND_GUST_SPEED] = get_mapped_attribute(
+        ATTR_FORECAST_NATIVE_WIND_GUST_SPEED
+    )


 class MetOfficeWeather(
     CoordinatorWeatherEntity[
-        TimestampDataUpdateCoordinator[MetOfficeData],
-        TimestampDataUpdateCoordinator[MetOfficeData],
+        TimestampDataUpdateCoordinator[ForecastData],
+        TimestampDataUpdateCoordinator[ForecastData],
+        TimestampDataUpdateCoordinator[ForecastData],
     ]
 ):
     """Implementation of a Met Office weather condition."""

     _attr_attribution = ATTRIBUTION
     _attr_has_entity_name = True
+    _attr_name = None

     _attr_native_temperature_unit = UnitOfTemperature.CELSIUS
-    _attr_native_pressure_unit = UnitOfPressure.HPA
-    _attr_native_wind_speed_unit = UnitOfSpeed.MILES_PER_HOUR
+    _attr_native_pressure_unit = UnitOfPressure.PA
+    _attr_native_precipitation_unit = UnitOfLength.MILLIMETERS
+    _attr_native_visibility_unit = UnitOfLength.METERS
+    _attr_native_wind_speed_unit = UnitOfSpeed.METERS_PER_SECOND
     _attr_supported_features = (
         WeatherEntityFeature.FORECAST_HOURLY | WeatherEntityFeature.FORECAST_DAILY
     )

     def __init__(
         self,
-        coordinator_daily: TimestampDataUpdateCoordinator[MetOfficeData],
-        coordinator_hourly: TimestampDataUpdateCoordinator[MetOfficeData],
+        coordinator_daily: TimestampDataUpdateCoordinator[ForecastData],
+        coordinator_hourly: TimestampDataUpdateCoordinator[ForecastData],
         hass_data: dict[str, Any],
     ) -> None:
         """Initialise the platform with a data instance."""
-        observation_coordinator = coordinator_daily
+        observation_coordinator = coordinator_hourly
         super().__init__(
             observation_coordinator,
             daily_coordinator=coordinator_daily,
@@ -124,81 +172,99 @@ class MetOfficeWeather(
         self._attr_device_info = get_device_info(
             coordinates=hass_data[METOFFICE_COORDINATES], name=hass_data[METOFFICE_NAME]
         )
-        self._attr_name = "Daily"
-        self._attr_unique_id = _calculate_unique_id(
-            hass_data[METOFFICE_COORDINATES], False
-        )
+        self._attr_unique_id = hass_data[METOFFICE_COORDINATES]

     @property
     def condition(self) -> str | None:
         """Return the current condition."""
-        if self.coordinator.data.now:
-            return CONDITION_MAP.get(self.coordinator.data.now.weather.value)
+        weather_now = self.coordinator.data.now()
+        value = get_attribute(weather_now, "significantWeatherCode")
+
+        if value:
+            return CONDITION_MAP.get(value)
         return None

     @property
     def native_temperature(self) -> float | None:
         """Return the platform temperature."""
-        weather_now = self.coordinator.data.now
-        if weather_now.temperature:
-            value = weather_now.temperature.value
-            return float(value) if value is not None else None
-        return None
+        weather_now = self.coordinator.data.now()
+        value = get_attribute(weather_now, "screenTemperature")
+        return float(value) if value is not None else None
+
+    @property
+    def native_dew_point(self) -> float | None:
+        """Return the dew point."""
+        weather_now = self.coordinator.data.now()
+        value = get_attribute(weather_now, "screenDewPointTemperature")
+        return float(value) if value is not None else None

     @property
     def native_pressure(self) -> float | None:
         """Return the mean sea-level pressure."""
-        weather_now = self.coordinator.data.now
-        if weather_now and weather_now.pressure:
-            value = weather_now.pressure.value
-            return float(value) if value is not None else None
-        return None
+        weather_now = self.coordinator.data.now()
+        value = get_attribute(weather_now, "mslp")
+        return float(value) if value is not None else None

     @property
     def humidity(self) -> float | None:
         """Return the relative humidity."""
-        weather_now = self.coordinator.data.now
-        if weather_now and weather_now.humidity:
-            value = weather_now.humidity.value
-            return float(value) if value is not None else None
-        return None
+        weather_now = self.coordinator.data.now()
+        value = get_attribute(weather_now, "screenRelativeHumidity")
+        return float(value) if value is not None else None
+
+    @property
+    def uv_index(self) -> float | None:
+        """Return the UV index."""
+        weather_now = self.coordinator.data.now()
+        value = get_attribute(weather_now, "uvIndex")
+        return float(value) if value is not None else None
+
+    @property
+    def native_visibility(self) -> float | None:
+        """Return the visibility."""
+        weather_now = self.coordinator.data.now()
+        value = get_attribute(weather_now, "visibility")
+        return float(value) if value is not None else None

     @property
     def native_wind_speed(self) -> float | None:
         """Return the wind speed."""
-        weather_now = self.coordinator.data.now
-        if weather_now and weather_now.wind_speed:
-            value = weather_now.wind_speed.value
-            return float(value) if value is not None else None
-        return None
+        weather_now = self.coordinator.data.now()
+        value = get_attribute(weather_now, "windSpeed10m")
+        return float(value) if value is not None else None

     @property
-    def wind_bearing(self) -> str | None:
+    def wind_bearing(self) -> float | None:
         """Return the wind bearing."""
-        weather_now = self.coordinator.data.now
-        if weather_now and weather_now.wind_direction:
-            value = weather_now.wind_direction.value
-            return str(value) if value is not None else None
-        return None
+        weather_now = self.coordinator.data.now()
+        value = get_attribute(weather_now, "windDirectionFrom10m")
+        return float(value) if value is not None else None

     @callback
     def _async_forecast_daily(self) -> list[Forecast] | None:
-        """Return the twice daily forecast in native units."""
+        """Return the daily forecast in native units."""
         coordinator = cast(
-            TimestampDataUpdateCoordinator[MetOfficeData],
+            TimestampDataUpdateCoordinator[ForecastData],
             self.forecast_coordinators["daily"],
         )
+        timesteps = coordinator.data.timesteps
         return [
-            _build_forecast_data(timestep) for timestep in coordinator.data.forecast
+            _build_daily_forecast_data(timestep)
+            for timestep in timesteps
+            if timestep["time"] > datetime.now(tz=timesteps[0]["time"].tzinfo)
         ]

     @callback
     def _async_forecast_hourly(self) -> list[Forecast] | None:
         """Return the hourly forecast in native units."""
         coordinator = cast(
-            TimestampDataUpdateCoordinator[MetOfficeData],
+            TimestampDataUpdateCoordinator[ForecastData],
             self.forecast_coordinators["hourly"],
         )
+
+        timesteps = coordinator.data.timesteps
         return [
-            _build_forecast_data(timestep) for timestep in coordinator.data.forecast
+            _build_hourly_forecast_data(timestep)
+            for timestep in timesteps
+            if timestep["time"] > datetime.now(tz=timesteps[0]["time"].tzinfo)
         ]
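Both forecast callbacks now filter the raw timesteps by timestamp before building Forecast dicts, using the timezone carried by the payload itself. The same filter in isolation, as a hedged sketch (the empty-list guard is added here for safety and is not part of the diff):

    # Isolated sketch of the timestep filter used by both forecast callbacks.
    from datetime import datetime
    from typing import Any


    def future_timesteps(timesteps: list[dict[str, Any]]) -> list[dict[str, Any]]:
        """Keep only timesteps after now, in the payload's own timezone."""
        if not timesteps:
            return []
        now = datetime.now(tz=timesteps[0]["time"].tzinfo)
        return [step for step in timesteps if step["time"] > now]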
requirements_all.txt (generated)
@@ -747,7 +747,7 @@ crownstone-uart==2.1.0
 datadog==0.15.0

 # homeassistant.components.metoffice
-datapoint==0.9.9
+datapoint==0.12.1

 # homeassistant.components.bluetooth
 dbus-fast==2.43.0
requirements_test_all.txt (generated)
@@ -644,7 +644,7 @@ crownstone-uart==2.1.0
 datadog==0.15.0

 # homeassistant.components.metoffice
-datapoint==0.9.9
+datapoint==0.12.1

 # homeassistant.components.bluetooth
 dbus-fast==2.43.0
@@ -9,10 +9,9 @@ import pytest
 @pytest.fixture
 def mock_simple_manager_fail():
     """Mock datapoint Manager with default values for testing in config_flow."""
-    with patch("datapoint.Manager") as mock_manager:
+    with patch("datapoint.Manager.Manager") as mock_manager:
         instance = mock_manager.return_value
-        instance.get_nearest_forecast_site.side_effect = APIException()
-        instance.get_forecast_for_site.side_effect = APIException()
+        instance.get_forecast = APIException()
         instance.latitude = None
         instance.longitude = None
         instance.site = None
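The fixture now patches the nested class path datapoint.Manager.Manager. Tests that need a failing client can follow the same pattern; a hedged example using the more conventional side_effect (the fixture in this commit assigns the exception object directly):

    # Illustrative test pattern; the test name and assertion target are hypothetical.
    from unittest.mock import patch

    from datapoint.exceptions import APIException


    def test_get_forecast_failure() -> None:
        with patch("datapoint.Manager.Manager") as mock_manager:
            instance = mock_manager.return_value
            instance.get_forecast.side_effect = APIException()
            # exercise code that calls get_forecast and assert it reports cannot_connect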
@@ -3,7 +3,7 @@
 from homeassistant.components.metoffice.const import DOMAIN
 from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME

-TEST_DATETIME_STRING = "2020-04-25T12:00:00+00:00"
+TEST_DATETIME_STRING = "2024-11-23T12:00:00+00:00"

 TEST_API_KEY = "test-metoffice-api-key"

@@ -34,31 +34,21 @@ METOFFICE_CONFIG_KINGSLYNN = {
 }

 KINGSLYNN_SENSOR_RESULTS = {
-    "weather": ("weather", "sunny"),
-    "visibility": ("visibility", "Very Good"),
-    "visibility_distance": ("visibility_distance", "20-40"),
-    "temperature": ("temperature", "14"),
-    "feels_like_temperature": ("feels_like_temperature", "13"),
-    "uv": ("uv_index", "6"),
-    "precipitation": ("probability_of_precipitation", "0"),
-    "wind_direction": ("wind_direction", "E"),
-    "wind_gust": ("wind_gust", "7"),
-    "wind_speed": ("wind_speed", "2"),
-    "humidity": ("humidity", "60"),
+    "weather": "rainy",
+    "temperature": "7.87",
+    "uv_index": "1",
+    "probability_of_precipitation": "67",
+    "pressure": "998.20",
+    "wind_speed": "22.21",
 }

 WAVERTREE_SENSOR_RESULTS = {
-    "weather": ("weather", "sunny"),
-    "visibility": ("visibility", "Good"),
-    "visibility_distance": ("visibility_distance", "10-20"),
-    "temperature": ("temperature", "17"),
-    "feels_like_temperature": ("feels_like_temperature", "14"),
-    "uv": ("uv_index", "5"),
-    "precipitation": ("probability_of_precipitation", "0"),
-    "wind_direction": ("wind_direction", "SSE"),
-    "wind_gust": ("wind_gust", "16"),
-    "wind_speed": ("wind_speed", "9"),
-    "humidity": ("humidity", "50"),
+    "weather": "rainy",
+    "temperature": "9.28",
+    "uv_index": "1",
+    "probability_of_precipitation": "61",
+    "pressure": "987.50",
+    "wind_speed": "17.60",
 }

 DEVICE_KEY_KINGSLYNN = {(DOMAIN, TEST_COORDINATES_KINGSLYNN)}
File diff suppressed because it is too large
@@ -1,14 +1,18 @@
-"""Test the National Weather Service (NWS) config flow."""
+"""Test the MetOffice config flow."""
 
+import datetime
 import json
 from unittest.mock import patch
 
+import pytest
 import requests_mock
 
 from homeassistant import config_entries
 from homeassistant.components.metoffice.const import DOMAIN
+from homeassistant.const import CONF_API_KEY
 from homeassistant.core import HomeAssistant
 from homeassistant.data_entry_flow import FlowResultType
+from homeassistant.helpers import device_registry as dr
 
 from .const import (
     METOFFICE_CONFIG_WAVERTREE,
@@ -28,8 +32,11 @@ async def test_form(hass: HomeAssistant, requests_mock: requests_mock.Mocker) ->
 
     # all metoffice test data encapsulated in here
     mock_json = json.loads(load_fixture("metoffice.json", "metoffice"))
-    all_sites = json.dumps(mock_json["all_sites"])
-    requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
+    wavertree_daily = json.dumps(mock_json["wavertree_daily"])
+    requests_mock.get(
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
+        text=wavertree_daily,
+    )
 
     result = await hass.config_entries.flow.async_init(
         DOMAIN, context={"source": config_entries.SOURCE_USER}
@@ -66,17 +73,10 @@ async def test_form_already_configured(
 
     # all metoffice test data encapsulated in here
     mock_json = json.loads(load_fixture("metoffice.json", "metoffice"))
-    all_sites = json.dumps(mock_json["all_sites"])
+    wavertree_daily = json.dumps(mock_json["wavertree_daily"])
 
-    requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
     requests_mock.get(
-        "/public/data/val/wxfcs/all/json/354107?res=3hourly",
-        text="",
-    )
-    requests_mock.get(
-        "/public/data/val/wxfcs/all/json/354107?res=daily",
-        text="",
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
+        text=wavertree_daily,
     )
 
     MockConfigEntry(
@@ -102,7 +102,9 @@ async def test_form_cannot_connect(
     hass.config.latitude = TEST_LATITUDE_WAVERTREE
     hass.config.longitude = TEST_LONGITUDE_WAVERTREE
 
-    requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text="")
+    requests_mock.get(
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily", text=""
+    )
 
     result = await hass.config_entries.flow.async_init(
         DOMAIN, context={"source": config_entries.SOURCE_USER}
@@ -122,7 +124,7 @@ async def test_form_unknown_error(
 ) -> None:
     """Test we handle unknown error."""
     mock_instance = mock_simple_manager_fail.return_value
-    mock_instance.get_nearest_forecast_site.side_effect = ValueError
+    mock_instance.get_forecast.side_effect = ValueError
 
     result = await hass.config_entries.flow.async_init(
         DOMAIN, context={"source": config_entries.SOURCE_USER}
@@ -135,3 +137,77 @@ async def test_form_unknown_error(
 
     assert result2["type"] is FlowResultType.FORM
     assert result2["errors"] == {"base": "unknown"}
+
+
+@pytest.mark.freeze_time(datetime.datetime(2024, 11, 23, 12, tzinfo=datetime.UTC))
+async def test_reauth_flow(
+    hass: HomeAssistant,
+    requests_mock: requests_mock.Mocker,
+    device_registry: dr.DeviceRegistry,
+) -> None:
+    """Test handling authentication errors and reauth flow."""
+    mock_json = json.loads(load_fixture("metoffice.json", "metoffice"))
+    wavertree_daily = json.dumps(mock_json["wavertree_daily"])
+    wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
+    requests_mock.get(
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
+        text=wavertree_daily,
+    )
+    requests_mock.get(
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
+        text=wavertree_hourly,
+    )
+
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        data=METOFFICE_CONFIG_WAVERTREE,
+    )
+    entry.add_to_hass(hass)
+    await hass.config_entries.async_setup(entry.entry_id)
+    await hass.async_block_till_done()
+
+    assert len(device_registry.devices) == 1
+
+    requests_mock.get(
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
+        text="",
+        status_code=401,
+    )
+    requests_mock.get(
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
+        text="",
+        status_code=401,
+    )
+
+    await entry.start_reauth_flow(hass)
+
+    flows = hass.config_entries.flow.async_progress()
+    assert len(flows) == 1
+    assert flows[0]["step_id"] == "reauth_confirm"
+
+    result = await hass.config_entries.flow.async_configure(
+        flows[0]["flow_id"],
+        {CONF_API_KEY: TEST_API_KEY},
+    )
+    await hass.async_block_till_done()
+
+    assert result["type"] is FlowResultType.FORM
+    assert result["errors"] == {"base": "invalid_auth"}
+
+    requests_mock.get(
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
+        text=wavertree_daily,
+    )
+    requests_mock.get(
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
+        text=wavertree_hourly,
+    )
+
+    result = await hass.config_entries.flow.async_configure(
+        flows[0]["flow_id"],
+        {CONF_API_KEY: TEST_API_KEY},
+    )
+    await hass.async_block_till_done()
+
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "reauth_successful"
|
|||||||
"""Tests for metoffice init."""
|
"""Tests for metoffice init."""
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
|
import json
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import requests_mock
|
import requests_mock
|
||||||
|
|
||||||
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
|
from homeassistant.components.metoffice.const import DOMAIN
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.helpers import entity_registry as er
|
from homeassistant.helpers import device_registry as dr
|
||||||
|
from homeassistant.util import utcnow
|
||||||
|
|
||||||
from .const import DOMAIN, METOFFICE_CONFIG_WAVERTREE, TEST_COORDINATES_WAVERTREE
|
from .const import METOFFICE_CONFIG_WAVERTREE
|
||||||
|
|
||||||
from tests.common import MockConfigEntry
|
from tests.common import MockConfigEntry, async_fire_time_changed, load_fixture
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC))
|
@pytest.mark.freeze_time(datetime.datetime(2024, 11, 23, 12, tzinfo=datetime.UTC))
|
||||||
@pytest.mark.parametrize(
|
async def test_reauth_on_auth_error(
|
||||||
("old_unique_id", "new_unique_id", "migration_needed"),
|
|
||||||
[
|
|
||||||
(
|
|
||||||
f"Station Name_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
f"name_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
True,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
f"Weather_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
f"weather_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
True,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
f"Temperature_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
f"temperature_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
True,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
f"Feels Like Temperature_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
f"feels_like_temperature_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
True,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
f"Wind Speed_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
f"wind_speed_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
True,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
f"Wind Direction_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
f"wind_direction_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
True,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
f"Wind Gust_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
f"wind_gust_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
True,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
f"Visibility_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
f"visibility_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
True,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
f"Visibility Distance_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
f"visibility_distance_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
True,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
f"UV Index_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
f"uv_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
True,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
f"Probability of Precipitation_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
f"precipitation_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
True,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
f"Humidity_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
f"humidity_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
True,
|
|
||||||
),
|
|
||||||
(
|
|
||||||
f"name_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
f"name_{TEST_COORDINATES_WAVERTREE}",
|
|
||||||
False,
|
|
||||||
),
|
|
||||||
("abcde", "abcde", False),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
async def test_migrate_unique_id(
|
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
entity_registry: er.EntityRegistry,
|
|
||||||
old_unique_id: str,
|
|
||||||
new_unique_id: str,
|
|
||||||
migration_needed: bool,
|
|
||||||
requests_mock: requests_mock.Mocker,
|
requests_mock: requests_mock.Mocker,
|
||||||
|
device_registry: dr.DeviceRegistry,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test unique id migration."""
|
"""Test handling authentication errors and reauth flow."""
|
||||||
|
mock_json = json.loads(load_fixture("metoffice.json", "metoffice"))
|
||||||
|
wavertree_daily = json.dumps(mock_json["wavertree_daily"])
|
||||||
|
wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
|
||||||
|
requests_mock.get(
|
||||||
|
"https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
|
||||||
|
text=wavertree_daily,
|
||||||
|
)
|
||||||
|
requests_mock.get(
|
||||||
|
"https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
|
||||||
|
text=wavertree_hourly,
|
||||||
|
)
|
||||||
|
|
||||||
entry = MockConfigEntry(
|
entry = MockConfigEntry(
|
||||||
domain=DOMAIN,
|
domain=DOMAIN,
|
||||||
data=METOFFICE_CONFIG_WAVERTREE,
|
data=METOFFICE_CONFIG_WAVERTREE,
|
||||||
)
|
)
|
||||||
entry.add_to_hass(hass)
|
entry.add_to_hass(hass)
|
||||||
|
|
||||||
entity: er.RegistryEntry = entity_registry.async_get_or_create(
|
|
||||||
suggested_object_id="my_sensor",
|
|
||||||
disabled_by=None,
|
|
||||||
domain=SENSOR_DOMAIN,
|
|
||||||
platform=DOMAIN,
|
|
||||||
unique_id=old_unique_id,
|
|
||||||
config_entry=entry,
|
|
||||||
)
|
|
||||||
assert entity.unique_id == old_unique_id
|
|
||||||
|
|
||||||
await hass.config_entries.async_setup(entry.entry_id)
|
await hass.config_entries.async_setup(entry.entry_id)
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
if migration_needed:
|
assert len(device_registry.devices) == 1
|
||||||
assert (
|
|
||||||
entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id)
|
|
||||||
is None
|
|
||||||
)
|
|
||||||
|
|
||||||
assert (
|
requests_mock.get(
|
||||||
entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, new_unique_id)
|
"https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
|
||||||
== "sensor.my_sensor"
|
text="",
|
||||||
|
status_code=401,
|
||||||
)
|
)
|
||||||
|
requests_mock.get(
|
||||||
|
"https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
|
||||||
|
text="",
|
||||||
|
status_code=401,
|
||||||
|
)
|
||||||
|
|
||||||
|
future_time = utcnow() + datetime.timedelta(minutes=40)
|
||||||
|
async_fire_time_changed(hass, future_time)
|
||||||
|
await hass.async_block_till_done(wait_background_tasks=True)
|
||||||
|
|
||||||
|
flows = hass.config_entries.flow.async_progress()
|
||||||
|
assert len(flows) == 1
|
||||||
|
assert flows[0]["step_id"] == "reauth_confirm"
|
||||||
|
@@ -2,13 +2,15 @@
 
 import datetime
 import json
+import re
 
 import pytest
 import requests_mock
 
 from homeassistant.components.metoffice.const import ATTRIBUTION, DOMAIN
+from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers import device_registry as dr
+from homeassistant.helpers import device_registry as dr, entity_registry as er
 
 from .const import (
     DEVICE_KEY_KINGSLYNN,
@@ -17,15 +19,15 @@ from .const import (
     METOFFICE_CONFIG_KINGSLYNN,
     METOFFICE_CONFIG_WAVERTREE,
     TEST_DATETIME_STRING,
-    TEST_SITE_NAME_KINGSLYNN,
-    TEST_SITE_NAME_WAVERTREE,
+    TEST_LATITUDE_WAVERTREE,
+    TEST_LONGITUDE_WAVERTREE,
     WAVERTREE_SENSOR_RESULTS,
 )
 
 from tests.common import MockConfigEntry, load_fixture
 
 
-@pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC))
+@pytest.mark.freeze_time(datetime.datetime(2024, 11, 23, 12, tzinfo=datetime.UTC))
 async def test_one_sensor_site_running(
     hass: HomeAssistant,
     device_registry: dr.DeviceRegistry,
@@ -34,17 +36,15 @@ async def test_one_sensor_site_running(
     """Test the Met Office sensor platform."""
     # all metoffice test data encapsulated in here
     mock_json = json.loads(load_fixture("metoffice.json", "metoffice"))
-    all_sites = json.dumps(mock_json["all_sites"])
     wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
     wavertree_daily = json.dumps(mock_json["wavertree_daily"])
 
-    requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
     requests_mock.get(
-        "/public/data/val/wxfcs/all/json/354107?res=3hourly",
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
         text=wavertree_hourly,
     )
     requests_mock.get(
-        "/public/data/val/wxfcs/all/json/354107?res=daily",
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
        text=wavertree_daily,
     )
 
@@ -66,17 +66,15 @@ async def test_one_sensor_site_running(
     assert len(running_sensor_ids) > 0
     for running_id in running_sensor_ids:
         sensor = hass.states.get(running_id)
-        sensor_id = sensor.attributes.get("sensor_id")
-        _, sensor_value = WAVERTREE_SENSOR_RESULTS[sensor_id]
+        sensor_id = re.search("met_office_wavertree_(.+?)$", running_id).group(1)
+        sensor_value = WAVERTREE_SENSOR_RESULTS[sensor_id]
 
         assert sensor.state == sensor_value
         assert sensor.attributes.get("last_update").isoformat() == TEST_DATETIME_STRING
-        assert sensor.attributes.get("site_id") == "354107"
-        assert sensor.attributes.get("site_name") == TEST_SITE_NAME_WAVERTREE
         assert sensor.attributes.get("attribution") == ATTRIBUTION
 
 
-@pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC))
+@pytest.mark.freeze_time(datetime.datetime(2024, 11, 23, 12, tzinfo=datetime.UTC))
 async def test_two_sensor_sites_running(
     hass: HomeAssistant,
     device_registry: dr.DeviceRegistry,
@@ -86,24 +84,18 @@ async def test_two_sensor_sites_running(
 
     # all metoffice test data encapsulated in here
     mock_json = json.loads(load_fixture("metoffice.json", "metoffice"))
-    all_sites = json.dumps(mock_json["all_sites"])
     wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
     wavertree_daily = json.dumps(mock_json["wavertree_daily"])
     kingslynn_hourly = json.dumps(mock_json["kingslynn_hourly"])
     kingslynn_daily = json.dumps(mock_json["kingslynn_daily"])
 
-    requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
     requests_mock.get(
-        "/public/data/val/wxfcs/all/json/354107?res=3hourly", text=wavertree_hourly
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
+        text=wavertree_hourly,
     )
     requests_mock.get(
-        "/public/data/val/wxfcs/all/json/354107?res=daily", text=wavertree_daily
-    )
-    requests_mock.get(
-        "/public/data/val/wxfcs/all/json/322380?res=3hourly", text=kingslynn_hourly
-    )
-    requests_mock.get(
-        "/public/data/val/wxfcs/all/json/322380?res=daily", text=kingslynn_daily
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
+        text=wavertree_daily,
     )
 
     entry = MockConfigEntry(
@@ -112,6 +104,16 @@ async def test_two_sensor_sites_running(
     )
     entry.add_to_hass(hass)
     await hass.config_entries.async_setup(entry.entry_id)
 
+    requests_mock.get(
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
+        text=kingslynn_hourly,
+    )
+    requests_mock.get(
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
+        text=kingslynn_daily,
+    )
+
     entry2 = MockConfigEntry(
         domain=DOMAIN,
         data=METOFFICE_CONFIG_KINGSLYNN,
@@ -134,25 +136,70 @@ async def test_two_sensor_sites_running(
     assert len(running_sensor_ids) > 0
     for running_id in running_sensor_ids:
         sensor = hass.states.get(running_id)
-        sensor_id = sensor.attributes.get("sensor_id")
-        if sensor.attributes.get("site_id") == "354107":
-            _, sensor_value = WAVERTREE_SENSOR_RESULTS[sensor_id]
+        if "wavertree" in running_id:
+            sensor_id = re.search("met_office_wavertree_(.+?)$", running_id).group(1)
+            sensor_value = WAVERTREE_SENSOR_RESULTS[sensor_id]
             assert sensor.state == sensor_value
             assert (
                 sensor.attributes.get("last_update").isoformat() == TEST_DATETIME_STRING
             )
-            assert sensor.attributes.get("sensor_id") == sensor_id
-            assert sensor.attributes.get("site_id") == "354107"
-            assert sensor.attributes.get("site_name") == TEST_SITE_NAME_WAVERTREE
             assert sensor.attributes.get("attribution") == ATTRIBUTION
 
         else:
-            _, sensor_value = KINGSLYNN_SENSOR_RESULTS[sensor_id]
+            sensor_id = re.search("met_office_king_s_lynn_(.+?)$", running_id).group(1)
+            sensor_value = KINGSLYNN_SENSOR_RESULTS[sensor_id]
             assert sensor.state == sensor_value
             assert (
                 sensor.attributes.get("last_update").isoformat() == TEST_DATETIME_STRING
             )
-            assert sensor.attributes.get("sensor_id") == sensor_id
-            assert sensor.attributes.get("site_id") == "322380"
-            assert sensor.attributes.get("site_name") == TEST_SITE_NAME_KINGSLYNN
             assert sensor.attributes.get("attribution") == ATTRIBUTION
+
+
+@pytest.mark.freeze_time(datetime.datetime(2024, 11, 23, 12, tzinfo=datetime.UTC))
+@pytest.mark.parametrize(
+    ("old_unique_id"),
+    [
+        f"visibility_distance_{TEST_LATITUDE_WAVERTREE}_{TEST_LONGITUDE_WAVERTREE}",
+        f"visibility_distance_{TEST_LATITUDE_WAVERTREE}_{TEST_LONGITUDE_WAVERTREE}_daily",
+    ],
+)
+async def test_legacy_entities_are_removed(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+    requests_mock: requests_mock.Mocker,
+    old_unique_id: str,
+) -> None:
+    """Test the expected entities are deleted."""
+    mock_json = json.loads(load_fixture("metoffice.json", "metoffice"))
+    wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
+    wavertree_daily = json.dumps(mock_json["wavertree_daily"])
+
+    requests_mock.get(
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
+        text=wavertree_hourly,
+    )
+    requests_mock.get(
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
+        text=wavertree_daily,
+    )
+    # Pre-create the entity
+    entity_registry.async_get_or_create(
+        SENSOR_DOMAIN,
+        DOMAIN,
+        unique_id=old_unique_id,
+        suggested_object_id="met_office_wavertree_visibility_distance",
+    )
+
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        data=METOFFICE_CONFIG_WAVERTREE,
+    )
+    entry.add_to_hass(hass)
+
+    await hass.config_entries.async_setup(entry.entry_id)
+    await hass.async_block_till_done()
+
+    assert (
+        entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id)
+        is None
+    )
@@ -47,29 +47,24 @@ async def wavertree_data(requests_mock: requests_mock.Mocker) -> dict[str, _Matc
     """Mock data for the Wavertree location."""
     # all metoffice test data encapsulated in here
     mock_json = json.loads(load_fixture("metoffice.json", "metoffice"))
-    all_sites = json.dumps(mock_json["all_sites"])
     wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
     wavertree_daily = json.dumps(mock_json["wavertree_daily"])
 
-    sitelist_mock = requests_mock.get(
-        "/public/data/val/wxfcs/all/json/sitelist/", text=all_sites
-    )
     wavertree_hourly_mock = requests_mock.get(
-        "/public/data/val/wxfcs/all/json/354107?res=3hourly",
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
        text=wavertree_hourly,
     )
     wavertree_daily_mock = requests_mock.get(
-        "/public/data/val/wxfcs/all/json/354107?res=daily",
+        "https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
         text=wavertree_daily,
     )
     return {
-        "sitelist_mock": sitelist_mock,
         "wavertree_hourly_mock": wavertree_hourly_mock,
         "wavertree_daily_mock": wavertree_daily_mock,
     }
 
 
-@pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC))
+@pytest.mark.freeze_time(datetime.datetime(2024, 11, 23, 12, tzinfo=datetime.UTC))
 async def test_site_cannot_connect(
     hass: HomeAssistant,
     device_registry: dr.DeviceRegistry,
|
|||||||
) -> None:
|
) -> None:
|
||||||
"""Test we handle cannot connect error."""
|
"""Test we handle cannot connect error."""
|
||||||
|
|
||||||
requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text="")
|
requests_mock.get(
|
||||||
requests_mock.get("/public/data/val/wxfcs/all/json/354107?res=3hourly", text="")
|
"https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
|
||||||
requests_mock.get("/public/data/val/wxfcs/all/json/354107?res=daily", text="")
|
text="",
|
||||||
|
)
|
||||||
|
requests_mock.get(
|
||||||
|
"https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
|
||||||
|
text="",
|
||||||
|
)
|
||||||
|
|
||||||
entry = MockConfigEntry(
|
entry = MockConfigEntry(
|
||||||
domain=DOMAIN,
|
domain=DOMAIN,
|
||||||
@ -91,15 +91,14 @@ async def test_site_cannot_connect(
|
|||||||
|
|
||||||
assert len(device_registry.devices) == 0
|
assert len(device_registry.devices) == 0
|
||||||
|
|
||||||
assert hass.states.get("weather.met_office_wavertree_3hourly") is None
|
assert hass.states.get("weather.met_office_wavertree") is None
|
||||||
assert hass.states.get("weather.met_office_wavertree_daily") is None
|
|
||||||
for sensor in WAVERTREE_SENSOR_RESULTS.values():
|
for sensor in WAVERTREE_SENSOR_RESULTS.values():
|
||||||
sensor_name = sensor[0]
|
sensor_name = sensor[0]
|
||||||
sensor = hass.states.get(f"sensor.wavertree_{sensor_name}")
|
sensor = hass.states.get(f"sensor.wavertree_{sensor_name}")
|
||||||
assert sensor is None
|
assert sensor is None
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC))
|
@pytest.mark.freeze_time(datetime.datetime(2024, 11, 23, 12, tzinfo=datetime.UTC))
|
||||||
async def test_site_cannot_update(
|
async def test_site_cannot_update(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
requests_mock: requests_mock.Mocker,
|
requests_mock: requests_mock.Mocker,
|
||||||
@ -115,21 +114,43 @@ async def test_site_cannot_update(
|
|||||||
await hass.config_entries.async_setup(entry.entry_id)
|
await hass.config_entries.async_setup(entry.entry_id)
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
weather = hass.states.get("weather.met_office_wavertree_daily")
|
weather = hass.states.get("weather.met_office_wavertree")
|
||||||
assert weather
|
assert weather
|
||||||
|
|
||||||
requests_mock.get("/public/data/val/wxfcs/all/json/354107?res=3hourly", text="")
|
requests_mock.get(
|
||||||
requests_mock.get("/public/data/val/wxfcs/all/json/354107?res=daily", text="")
|
"https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
|
||||||
|
text="",
|
||||||
|
)
|
||||||
|
requests_mock.get(
|
||||||
|
"https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
|
||||||
|
text="",
|
||||||
|
)
|
||||||
|
|
||||||
future_time = utcnow() + timedelta(minutes=20)
|
future_time = utcnow() + timedelta(minutes=40)
|
||||||
async_fire_time_changed(hass, future_time)
|
async_fire_time_changed(hass, future_time)
|
||||||
await hass.async_block_till_done(wait_background_tasks=True)
|
await hass.async_block_till_done(wait_background_tasks=True)
|
||||||
|
|
||||||
weather = hass.states.get("weather.met_office_wavertree_daily")
|
weather = hass.states.get("weather.met_office_wavertree")
|
||||||
|
assert weather.state == STATE_UNAVAILABLE
|
||||||
|
|
||||||
|
requests_mock.get(
|
||||||
|
"https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
|
||||||
|
status_code=404,
|
||||||
|
)
|
||||||
|
requests_mock.get(
|
||||||
|
"https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
|
||||||
|
status_code=404,
|
||||||
|
)
|
||||||
|
|
||||||
|
future_time = utcnow() + timedelta(minutes=40)
|
||||||
|
async_fire_time_changed(hass, future_time)
|
||||||
|
await hass.async_block_till_done(wait_background_tasks=True)
|
||||||
|
|
||||||
|
weather = hass.states.get("weather.met_office_wavertree")
|
||||||
assert weather.state == STATE_UNAVAILABLE
|
assert weather.state == STATE_UNAVAILABLE
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC))
|
@pytest.mark.freeze_time(datetime.datetime(2024, 11, 23, 12, tzinfo=datetime.UTC))
|
||||||
async def test_one_weather_site_running(
|
async def test_one_weather_site_running(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
device_registry: dr.DeviceRegistry,
|
device_registry: dr.DeviceRegistry,
|
||||||
@ -153,17 +174,17 @@ async def test_one_weather_site_running(
|
|||||||
assert device_wavertree.name == "Met Office Wavertree"
|
assert device_wavertree.name == "Met Office Wavertree"
|
||||||
|
|
||||||
# Wavertree daily weather platform expected results
|
# Wavertree daily weather platform expected results
|
||||||
weather = hass.states.get("weather.met_office_wavertree_daily")
|
weather = hass.states.get("weather.met_office_wavertree")
|
||||||
assert weather
|
assert weather
|
||||||
|
|
||||||
assert weather.state == "sunny"
|
assert weather.state == "rainy"
|
||||||
assert weather.attributes.get("temperature") == 19
|
assert weather.attributes.get("temperature") == 9.3
|
||||||
assert weather.attributes.get("wind_speed") == 14.48
|
assert weather.attributes.get("wind_speed") == 28.33
|
||||||
assert weather.attributes.get("wind_bearing") == "SSE"
|
assert weather.attributes.get("wind_bearing") == 176.0
|
||||||
assert weather.attributes.get("humidity") == 50
|
assert weather.attributes.get("humidity") == 95
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC))
|
@pytest.mark.freeze_time(datetime.datetime(2024, 11, 23, 12, tzinfo=datetime.UTC))
|
||||||
async def test_two_weather_sites_running(
|
async def test_two_weather_sites_running(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
device_registry: dr.DeviceRegistry,
|
device_registry: dr.DeviceRegistry,
|
||||||
@ -177,19 +198,23 @@ async def test_two_weather_sites_running(
|
|||||||
kingslynn_hourly = json.dumps(mock_json["kingslynn_hourly"])
|
kingslynn_hourly = json.dumps(mock_json["kingslynn_hourly"])
|
||||||
kingslynn_daily = json.dumps(mock_json["kingslynn_daily"])
|
kingslynn_daily = json.dumps(mock_json["kingslynn_daily"])
|
||||||
|
|
||||||
requests_mock.get(
|
|
||||||
"/public/data/val/wxfcs/all/json/322380?res=3hourly", text=kingslynn_hourly
|
|
||||||
)
|
|
||||||
requests_mock.get(
|
|
||||||
"/public/data/val/wxfcs/all/json/322380?res=daily", text=kingslynn_daily
|
|
||||||
)
|
|
||||||
|
|
||||||
entry = MockConfigEntry(
|
entry = MockConfigEntry(
|
||||||
domain=DOMAIN,
|
domain=DOMAIN,
|
||||||
data=METOFFICE_CONFIG_WAVERTREE,
|
data=METOFFICE_CONFIG_WAVERTREE,
|
||||||
)
|
)
|
||||||
entry.add_to_hass(hass)
|
entry.add_to_hass(hass)
|
||||||
await hass.config_entries.async_setup(entry.entry_id)
|
await hass.config_entries.async_setup(entry.entry_id)
|
||||||
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
|
requests_mock.get(
|
||||||
|
"https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/hourly",
|
||||||
|
text=kingslynn_hourly,
|
||||||
|
)
|
||||||
|
requests_mock.get(
|
||||||
|
"https://data.hub.api.metoffice.gov.uk/sitespecific/v0/point/daily",
|
||||||
|
text=kingslynn_daily,
|
||||||
|
)
|
||||||
|
|
||||||
entry2 = MockConfigEntry(
|
entry2 = MockConfigEntry(
|
||||||
domain=DOMAIN,
|
domain=DOMAIN,
|
||||||
data=METOFFICE_CONFIG_KINGSLYNN,
|
data=METOFFICE_CONFIG_KINGSLYNN,
|
||||||
@ -209,29 +234,29 @@ async def test_two_weather_sites_running(
|
|||||||
assert device_wavertree.name == "Met Office Wavertree"
|
assert device_wavertree.name == "Met Office Wavertree"
|
||||||
|
|
||||||
# Wavertree daily weather platform expected results
|
# Wavertree daily weather platform expected results
|
||||||
weather = hass.states.get("weather.met_office_wavertree_daily")
|
weather = hass.states.get("weather.met_office_wavertree")
|
||||||
assert weather
|
assert weather
|
||||||
|
|
||||||
assert weather.state == "sunny"
|
assert weather.state == "rainy"
|
||||||
assert weather.attributes.get("temperature") == 19
|
assert weather.attributes.get("temperature") == 9.3
|
||||||
assert weather.attributes.get("wind_speed") == 14.48
|
assert weather.attributes.get("wind_speed") == 28.33
|
||||||
assert weather.attributes.get("wind_speed_unit") == "km/h"
|
assert weather.attributes.get("wind_speed_unit") == "km/h"
|
||||||
assert weather.attributes.get("wind_bearing") == "SSE"
|
assert weather.attributes.get("wind_bearing") == 176.0
|
||||||
assert weather.attributes.get("humidity") == 50
|
assert weather.attributes.get("humidity") == 95
|
||||||
|
|
||||||
# King's Lynn daily weather platform expected results
|
# King's Lynn daily weather platform expected results
|
||||||
weather = hass.states.get("weather.met_office_king_s_lynn_daily")
|
weather = hass.states.get("weather.met_office_king_s_lynn")
|
||||||
assert weather
|
assert weather
|
||||||
|
|
||||||
assert weather.state == "cloudy"
|
assert weather.state == "rainy"
|
||||||
assert weather.attributes.get("temperature") == 9
|
assert weather.attributes.get("temperature") == 7.9
|
||||||
assert weather.attributes.get("wind_speed") == 6.44
|
assert weather.attributes.get("wind_speed") == 35.75
|
||||||
assert weather.attributes.get("wind_speed_unit") == "km/h"
|
assert weather.attributes.get("wind_speed_unit") == "km/h"
|
||||||
assert weather.attributes.get("wind_bearing") == "ESE"
|
assert weather.attributes.get("wind_bearing") == 180.0
|
||||||
assert weather.attributes.get("humidity") == 75
|
assert weather.attributes.get("humidity") == 98
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC))
|
@pytest.mark.freeze_time(datetime.datetime(2024, 11, 23, 12, tzinfo=datetime.UTC))
|
||||||
async def test_new_config_entry(
|
async def test_new_config_entry(
|
||||||
hass: HomeAssistant, entity_registry: er.EntityRegistry, no_sensor, wavertree_data
|
hass: HomeAssistant, entity_registry: er.EntityRegistry, no_sensor, wavertree_data
|
||||||
) -> None:
|
) -> None:
|
||||||
@ -250,7 +275,7 @@ async def test_new_config_entry(
|
|||||||
assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 1
|
assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC))
|
@pytest.mark.freeze_time(datetime.datetime(2024, 11, 23, 12, tzinfo=datetime.UTC))
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
("service"),
|
("service"),
|
||||||
[SERVICE_GET_FORECASTS],
|
[SERVICE_GET_FORECASTS],
|
||||||
@ -281,7 +306,7 @@ async def test_forecast_service(
|
|||||||
WEATHER_DOMAIN,
|
WEATHER_DOMAIN,
|
||||||
service,
|
service,
|
||||||
{
|
{
|
||||||
"entity_id": "weather.met_office_wavertree_daily",
|
"entity_id": "weather.met_office_wavertree",
|
||||||
"type": forecast_type,
|
"type": forecast_type,
|
||||||
},
|
},
|
||||||
blocking=True,
|
blocking=True,
|
||||||
@ -289,24 +314,17 @@ async def test_forecast_service(
|
|||||||
)
|
)
|
||||||
assert response == snapshot
|
assert response == snapshot
|
||||||
|
|
||||||
# Calling the services should use cached data
|
|
||||||
assert wavertree_data["wavertree_daily_mock"].call_count == 1
|
|
||||||
assert wavertree_data["wavertree_hourly_mock"].call_count == 1
|
|
||||||
|
|
||||||
# Trigger data refetch
|
# Trigger data refetch
|
||||||
freezer.tick(DEFAULT_SCAN_INTERVAL + timedelta(seconds=1))
|
freezer.tick(DEFAULT_SCAN_INTERVAL + timedelta(seconds=1))
|
||||||
async_fire_time_changed(hass)
|
async_fire_time_changed(hass)
|
||||||
await hass.async_block_till_done(wait_background_tasks=True)
|
await hass.async_block_till_done(wait_background_tasks=True)
|
||||||
|
|
||||||
assert wavertree_data["wavertree_daily_mock"].call_count == 2
|
|
||||||
assert wavertree_data["wavertree_hourly_mock"].call_count == 1
|
|
||||||
|
|
||||||
for forecast_type in ("daily", "hourly"):
|
for forecast_type in ("daily", "hourly"):
|
||||||
response = await hass.services.async_call(
|
response = await hass.services.async_call(
|
||||||
WEATHER_DOMAIN,
|
WEATHER_DOMAIN,
|
||||||
service,
|
service,
|
||||||
{
|
{
|
||||||
"entity_id": "weather.met_office_wavertree_daily",
|
"entity_id": "weather.met_office_wavertree",
|
||||||
"type": forecast_type,
|
"type": forecast_type,
|
||||||
},
|
},
|
||||||
blocking=True,
|
blocking=True,
|
||||||
@ -314,41 +332,18 @@ async def test_forecast_service(
|
|||||||
)
|
)
|
||||||
assert response == snapshot
|
assert response == snapshot
|
||||||
|
|
||||||
# Calling the services should update the hourly forecast
|
|
||||||
assert wavertree_data["wavertree_daily_mock"].call_count == 2
|
|
||||||
assert wavertree_data["wavertree_hourly_mock"].call_count == 2
|
|
||||||
|
|
||||||
# Update fails
|
@pytest.mark.freeze_time(datetime.datetime(2024, 11, 23, 12, tzinfo=datetime.UTC))
|
||||||
requests_mock.get("/public/data/val/wxfcs/all/json/354107?res=3hourly", text="")
|
|
||||||
|
|
||||||
freezer.tick(DEFAULT_SCAN_INTERVAL + timedelta(seconds=1))
|
|
||||||
async_fire_time_changed(hass)
|
|
||||||
await hass.async_block_till_done(wait_background_tasks=True)
|
|
||||||
|
|
||||||
response = await hass.services.async_call(
|
|
||||||
WEATHER_DOMAIN,
|
|
||||||
service,
|
|
||||||
{
|
|
||||||
"entity_id": "weather.met_office_wavertree_daily",
|
|
||||||
"type": "hourly",
|
|
||||||
},
|
|
||||||
blocking=True,
|
|
||||||
return_response=True,
|
|
||||||
)
|
|
||||||
assert response == snapshot
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC))
|
|
||||||
async def test_legacy_config_entry_is_removed(
|
async def test_legacy_config_entry_is_removed(
|
||||||
hass: HomeAssistant, entity_registry: er.EntityRegistry, no_sensor, wavertree_data
|
hass: HomeAssistant, entity_registry: er.EntityRegistry, no_sensor, wavertree_data
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test the expected entities are created."""
|
"""Test the expected entities are created."""
|
||||||
# Pre-create the hourly entity
|
# Pre-create the daily entity
|
||||||
entity_registry.async_get_or_create(
|
entity_registry.async_get_or_create(
|
||||||
WEATHER_DOMAIN,
|
WEATHER_DOMAIN,
|
||||||
DOMAIN,
|
DOMAIN,
|
||||||
"53.38374_-2.90929",
|
"53.38374_-2.90929",
|
||||||
suggested_object_id="met_office_wavertree_3_hourly",
|
suggested_object_id="met_office_wavertree_daily",
|
||||||
)
|
)
|
||||||
|
|
||||||
entry = MockConfigEntry(
|
entry = MockConfigEntry(
|
||||||
@ -365,8 +360,7 @@ async def test_legacy_config_entry_is_removed(
|
|||||||
assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 1
|
assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.freeze_time(datetime.datetime(2020, 4, 25, 12, tzinfo=datetime.UTC))
|
@pytest.mark.freeze_time(datetime.datetime(2024, 11, 23, 12, tzinfo=datetime.UTC))
|
||||||
@pytest.mark.parametrize("forecast_type", ["daily", "hourly"])
|
|
||||||
async def test_forecast_subscription(
|
async def test_forecast_subscription(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
hass_ws_client: WebSocketGenerator,
|
hass_ws_client: WebSocketGenerator,
|
||||||
@ -374,7 +368,6 @@ async def test_forecast_subscription(
|
|||||||
snapshot: SnapshotAssertion,
|
snapshot: SnapshotAssertion,
|
||||||
no_sensor,
|
no_sensor,
|
||||||
wavertree_data: dict[str, _Matcher],
|
wavertree_data: dict[str, _Matcher],
|
||||||
forecast_type: str,
|
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test multiple forecast."""
|
"""Test multiple forecast."""
|
||||||
client = await hass_ws_client(hass)
|
client = await hass_ws_client(hass)
|
||||||
@ -391,8 +384,8 @@ async def test_forecast_subscription(
|
|||||||
await client.send_json_auto_id(
|
await client.send_json_auto_id(
|
||||||
{
|
{
|
||||||
"type": "weather/subscribe_forecast",
|
"type": "weather/subscribe_forecast",
|
||||||
"forecast_type": forecast_type,
|
"forecast_type": "hourly",
|
||||||
"entity_id": "weather.met_office_wavertree_daily",
|
"entity_id": "weather.met_office_wavertree",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
msg = await client.receive_json()
|
msg = await client.receive_json()
|
||||||