Mirror of https://github.com/home-assistant/core.git
Add forecasts to MetOffice integration (#50876)
* MetOfficeData now retrieves both 3-hourly and daily data (full forecast data, as well as a "now" snapshot) on each update
* Bump the datapoint API up to the latest version
* Create 2 sets of sensors - one of each set for 3-hourly and for daily data (same ones initially enabled, for now)
* Create two entities (one each for 3-hourly and daily data) and also add in the forecast data for each dataset
* Testing changes to accommodate now having two sets of everything for 3-hourly and daily update data
* Removed unused import (reported by flake8)
* As per conversation with @MatthewFlamm, leave the 3-hourly entity's unique_id unchanged (although the display name is changed)
* Make some improvements based on reviews and fix up the formatting/linting failures
* Added more test coverage
* import asyncio
* Try to fix test
* Rewrote everything using CoordinatorEntity
* Fixed config flow
* Fixed lint errors

Co-authored-by: MrHarcombe <ian.harcombe@gmail.com>
Co-authored-by: Henco Appel <hencoappel+github@gmail.com>
This commit is contained in:
Parent: 23339cff95
Commit: 2d1744c573
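The core of the rewrite described above is Home Assistant's DataUpdateCoordinator / CoordinatorEntity pattern: one coordinator per update cadence (3-hourly and daily), with entities reading coordinator.data instead of polling the API themselves. The following is a minimal sketch of that pattern, assuming a running Home Assistant instance; the helper name build_coordinator and the ExampleEntity class are illustrative only and are not part of this commit:

# Hedged sketch of the coordinator pattern adopted by this PR; not the PR's exact code.
from datetime import timedelta
import logging

from homeassistant.helpers.update_coordinator import (
    CoordinatorEntity,
    DataUpdateCoordinator,
)

_LOGGER = logging.getLogger(__name__)


async def build_coordinator(hass, name, update_method):
    """Create one coordinator per update cadence (e.g. 3-hourly or daily)."""
    coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        name=name,
        update_method=update_method,  # an async callable returning the fetched data
        update_interval=timedelta(minutes=15),
    )
    # Raises ConfigEntryNotReady if the first fetch fails, as in the diff below.
    await coordinator.async_config_entry_first_refresh()
    return coordinator


class ExampleEntity(CoordinatorEntity):
    """Entities bound to a coordinator read self.coordinator.data; polling and availability come from the base class."""

    @property
    def value(self):
        return self.coordinator.data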
homeassistant/components/metoffice/__init__.py
@@ -1,7 +1,10 @@
 """The Met Office integration."""
 
+import asyncio
 import logging
 
+import datapoint
+
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
 from homeassistant.core import HomeAssistant
@@ -11,11 +14,15 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
 from .const import (
     DEFAULT_SCAN_INTERVAL,
     DOMAIN,
-    METOFFICE_COORDINATOR,
-    METOFFICE_DATA,
+    METOFFICE_COORDINATES,
+    METOFFICE_DAILY_COORDINATOR,
+    METOFFICE_HOURLY_COORDINATOR,
     METOFFICE_NAME,
+    MODE_3HOURLY,
+    MODE_DAILY,
 )
 from .data import MetOfficeData
+from .helpers import fetch_data, fetch_site
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -30,30 +37,53 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     api_key = entry.data[CONF_API_KEY]
     site_name = entry.data[CONF_NAME]
 
-    metoffice_data = MetOfficeData(hass, api_key, latitude, longitude)
-    await metoffice_data.async_update_site()
-    if metoffice_data.site_name is None:
+    connection = datapoint.connection(api_key=api_key)
+
+    site = await hass.async_add_executor_job(
+        fetch_site, connection, latitude, longitude
+    )
+    if site is None:
         raise ConfigEntryNotReady()
 
-    metoffice_coordinator = DataUpdateCoordinator(
+    async def async_update_3hourly() -> MetOfficeData:
+        return await hass.async_add_executor_job(
+            fetch_data, connection, site, MODE_3HOURLY
+        )
+
+    async def async_update_daily() -> MetOfficeData:
+        return await hass.async_add_executor_job(
+            fetch_data, connection, site, MODE_DAILY
+        )
+
+    metoffice_hourly_coordinator = DataUpdateCoordinator(
         hass,
         _LOGGER,
-        name=f"MetOffice Coordinator for {site_name}",
-        update_method=metoffice_data.async_update,
+        name=f"MetOffice Hourly Coordinator for {site_name}",
+        update_method=async_update_3hourly,
+        update_interval=DEFAULT_SCAN_INTERVAL,
+    )
+
+    metoffice_daily_coordinator = DataUpdateCoordinator(
+        hass,
+        _LOGGER,
+        name=f"MetOffice Daily Coordinator for {site_name}",
+        update_method=async_update_daily,
         update_interval=DEFAULT_SCAN_INTERVAL,
     )
 
     metoffice_hass_data = hass.data.setdefault(DOMAIN, {})
     metoffice_hass_data[entry.entry_id] = {
-        METOFFICE_DATA: metoffice_data,
-        METOFFICE_COORDINATOR: metoffice_coordinator,
+        METOFFICE_HOURLY_COORDINATOR: metoffice_hourly_coordinator,
+        METOFFICE_DAILY_COORDINATOR: metoffice_daily_coordinator,
         METOFFICE_NAME: site_name,
+        METOFFICE_COORDINATES: f"{latitude}_{longitude}",
     }
 
     # Fetch initial data so we have data when entities subscribe
-    await metoffice_coordinator.async_refresh()
-    if metoffice_data.now is None:
-        raise ConfigEntryNotReady()
+    await asyncio.gather(
+        metoffice_hourly_coordinator.async_config_entry_first_refresh(),
+        metoffice_daily_coordinator.async_config_entry_first_refresh(),
+    )
 
     hass.config_entries.async_setup_platforms(entry, PLATFORMS)
 
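Because the datapoint client is a blocking library, every call in the setup above is pushed onto the executor rather than awaited directly. A minimal sketch of that wrapping, assuming a hass instance is available; blocking_fetch and async_fetch are illustrative names, not functions from this commit:

import asyncio


def blocking_fetch(connection, site_id, mode):
    """Placeholder for a blocking datapoint call such as get_forecast_for_site."""
    return connection.get_forecast_for_site(site_id, mode)


async def async_fetch(hass, connection, site_id, mode):
    # hass.async_add_executor_job runs the blocking call in the thread pool,
    # keeping the event loop free while the HTTP request is in flight.
    return await hass.async_add_executor_job(blocking_fetch, connection, site_id, mode)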
homeassistant/components/metoffice/config_flow.py
@@ -1,14 +1,15 @@
 """Config flow for Met Office integration."""
 import logging
 
+import datapoint
 import voluptuous as vol
 
 from homeassistant import config_entries, core, exceptions
+from homeassistant.components.metoffice.helpers import fetch_site
 from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
 from homeassistant.helpers import config_validation as cv
 
 from .const import DOMAIN
-from .data import MetOfficeData
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -22,12 +23,16 @@ async def validate_input(hass: core.HomeAssistant, data):
     longitude = data[CONF_LONGITUDE]
     api_key = data[CONF_API_KEY]
 
-    metoffice_data = MetOfficeData(hass, api_key, latitude, longitude)
-    await metoffice_data.async_update_site()
-    if metoffice_data.site_name is None:
+    connection = datapoint.connection(api_key=api_key)
+
+    site = await hass.async_add_executor_job(
+        fetch_site, connection, latitude, longitude
+    )
+    if site is None:
         raise CannotConnect()
 
-    return {"site_name": metoffice_data.site_name}
+    return {"site_name": site.name}
 
 
 class MetOfficeConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
homeassistant/components/metoffice/const.py
@@ -25,12 +25,16 @@ ATTRIBUTION = "Data provided by the Met Office"
 
 DEFAULT_SCAN_INTERVAL = timedelta(minutes=15)
 
-METOFFICE_DATA = "metoffice_data"
-METOFFICE_COORDINATOR = "metoffice_coordinator"
+METOFFICE_COORDINATES = "metoffice_coordinates"
+METOFFICE_HOURLY_COORDINATOR = "metoffice_hourly_coordinator"
+METOFFICE_DAILY_COORDINATOR = "metoffice_daily_coordinator"
 METOFFICE_MONITORED_CONDITIONS = "metoffice_monitored_conditions"
 METOFFICE_NAME = "metoffice_name"
 
 MODE_3HOURLY = "3hourly"
+MODE_3HOURLY_LABEL = "3-Hourly"
+MODE_DAILY = "daily"
+MODE_DAILY_LABEL = "Daily"
 
 CONDITION_CLASSES = {
     ATTR_CONDITION_CLOUDY: ["7", "8"],
homeassistant/components/metoffice/data.py
@@ -1,78 +1,11 @@
 """Common Met Office Data class used by both sensor and entity."""
 
-import logging
-
-import datapoint
-
-from .const import MODE_3HOURLY
-
-_LOGGER = logging.getLogger(__name__)
-
 
 class MetOfficeData:
-    """Get current and forecast data from Datapoint.
-
-    Please note that the 'datapoint' library is not asyncio-friendly, so some
-    calls have had to be wrapped with the standard hassio helper
-    async_add_executor_job.
-    """
-
-    def __init__(self, hass, api_key, latitude, longitude):
+    """Data structure for MetOffice weather and forecast."""
+
+    def __init__(self, now, forecast, site):
         """Initialize the data object."""
-        self._hass = hass
-        self._datapoint = datapoint.connection(api_key=api_key)
-        self._site = None
-
-        # Public attributes
-        self.latitude = latitude
-        self.longitude = longitude
-
-        # Holds the current data from the Met Office
-        self.site_id = None
-        self.site_name = None
-        self.now = None
-
-    async def async_update_site(self):
-        """Async wrapper for getting the DataPoint site."""
-        return await self._hass.async_add_executor_job(self._update_site)
-
-    def _update_site(self):
-        """Return the nearest DataPoint Site to the held latitude/longitude."""
-        try:
-            new_site = self._datapoint.get_nearest_forecast_site(
-                latitude=self.latitude, longitude=self.longitude
-            )
-            if self._site is None or self._site.id != new_site.id:
-                self._site = new_site
-                self.now = None
-
-            self.site_id = self._site.id
-            self.site_name = self._site.name
-
-        except datapoint.exceptions.APIException as err:
-            _LOGGER.error("Received error from Met Office Datapoint: %s", err)
-            self._site = None
-            self.site_id = None
-            self.site_name = None
-            self.now = None
-
-        return self._site
-
-    async def async_update(self):
-        """Async wrapper for update method."""
-        return await self._hass.async_add_executor_job(self._update)
-
-    def _update(self):
-        """Get the latest data from DataPoint."""
-        if self._site is None:
-            _LOGGER.error("No Met Office forecast site held, check logs for problems")
-            return
-
-        try:
-            forecast = self._datapoint.get_forecast_for_site(
-                self._site.id, MODE_3HOURLY
-            )
-            self.now = forecast.now()
-        except (ValueError, datapoint.exceptions.APIException) as err:
-            _LOGGER.error("Check Met Office connection: %s", err.args)
-            self.now = None
+        self.now = now
+        self.forecast = forecast
+        self.site = site
homeassistant/components/metoffice/helpers.py (new file, 44 lines)
@@ -0,0 +1,44 @@
+"""Helpers used for Met Office integration."""
+
+import logging
+
+import datapoint
+
+from homeassistant.helpers.update_coordinator import UpdateFailed
+from homeassistant.util import utcnow
+
+from .data import MetOfficeData
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def fetch_site(connection: datapoint.Manager, latitude, longitude):
+    """Fetch site information from Datapoint API."""
+    try:
+        return connection.get_nearest_forecast_site(
+            latitude=latitude, longitude=longitude
+        )
+    except datapoint.exceptions.APIException as err:
+        _LOGGER.error("Received error from Met Office Datapoint: %s", err)
+        return None
+
+
+def fetch_data(connection: datapoint.Manager, site, mode) -> MetOfficeData:
+    """Fetch weather and forecast from Datapoint API."""
+    try:
+        forecast = connection.get_forecast_for_site(site.id, mode)
+    except (ValueError, datapoint.exceptions.APIException) as err:
+        _LOGGER.error("Check Met Office connection: %s", err.args)
+        raise UpdateFailed from err
+    else:
+        time_now = utcnow()
+        return MetOfficeData(
+            forecast.now(),
+            [
+                timestep
+                for day in forecast.days
+                for timestep in day.timesteps
+                if timestep.date > time_now
+            ],
+            site,
+        )
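These helpers are plain synchronous functions, so they can be exercised outside the coordinators as well. A minimal usage sketch, assuming a valid Datapoint API key; the key value and coordinates below are placeholders:

import datapoint

from homeassistant.components.metoffice.const import MODE_DAILY
from homeassistant.components.metoffice.helpers import fetch_data, fetch_site

connection = datapoint.connection(api_key="MY_API_KEY")  # placeholder key
site = fetch_site(connection, 53.38, -2.90)  # placeholder coordinates
if site is not None:
    # fetch_data raises UpdateFailed on API errors, which the coordinator turns
    # into an "unavailable" state for the bound entities.
    data = fetch_data(connection, site, MODE_DAILY)
    print(data.site.name, data.now, len(data.forecast))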
homeassistant/components/metoffice/manifest.json
@@ -2,7 +2,7 @@
   "domain": "metoffice",
   "name": "Met Office",
   "documentation": "https://www.home-assistant.io/integrations/metoffice",
-  "requirements": ["datapoint==0.9.5"],
+  "requirements": ["datapoint==0.9.8"],
   "codeowners": ["@MrHarcombe"],
   "config_flow": true,
   "iot_class": "cloud_polling"
homeassistant/components/metoffice/sensor.py
@@ -10,16 +10,21 @@ from homeassistant.const import (
     TEMP_CELSIUS,
     UV_INDEX,
 )
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import HomeAssistant
 from homeassistant.helpers.typing import ConfigType
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
 from .const import (
     ATTRIBUTION,
     CONDITION_CLASSES,
     DOMAIN,
-    METOFFICE_COORDINATOR,
-    METOFFICE_DATA,
+    METOFFICE_COORDINATES,
+    METOFFICE_DAILY_COORDINATOR,
+    METOFFICE_HOURLY_COORDINATOR,
     METOFFICE_NAME,
+    MODE_3HOURLY_LABEL,
+    MODE_DAILY,
+    MODE_DAILY_LABEL,
     VISIBILITY_CLASSES,
     VISIBILITY_DISTANCE_CLASSES,
 )
@@ -85,28 +90,40 @@ async def async_setup_entry(
 
     async_add_entities(
         [
-            MetOfficeCurrentSensor(entry.data, hass_data, sensor_type)
+            MetOfficeCurrentSensor(
+                hass_data[METOFFICE_HOURLY_COORDINATOR], hass_data, True, sensor_type
+            )
+            for sensor_type in SENSOR_TYPES
+        ]
+        + [
+            MetOfficeCurrentSensor(
+                hass_data[METOFFICE_DAILY_COORDINATOR], hass_data, False, sensor_type
+            )
             for sensor_type in SENSOR_TYPES
         ],
         False,
     )
 
 
-class MetOfficeCurrentSensor(SensorEntity):
+class MetOfficeCurrentSensor(CoordinatorEntity, SensorEntity):
     """Implementation of a Met Office current weather condition sensor."""
 
-    def __init__(self, entry_data, hass_data, sensor_type):
+    def __init__(self, coordinator, hass_data, use_3hourly, sensor_type):
         """Initialize the sensor."""
-        self._data = hass_data[METOFFICE_DATA]
-        self._coordinator = hass_data[METOFFICE_COORDINATOR]
+        super().__init__(coordinator)
 
         self._type = sensor_type
-        self._name = f"{hass_data[METOFFICE_NAME]} {SENSOR_TYPES[self._type][0]}"
-        self._unique_id = f"{SENSOR_TYPES[self._type][0]}_{self._data.latitude}_{self._data.longitude}"
+        mode_label = MODE_3HOURLY_LABEL if use_3hourly else MODE_DAILY_LABEL
+        self._name = (
+            f"{hass_data[METOFFICE_NAME]} {SENSOR_TYPES[self._type][0]} {mode_label}"
+        )
+        self._unique_id = (
+            f"{SENSOR_TYPES[self._type][0]}_{hass_data[METOFFICE_COORDINATES]}"
+        )
+        if not use_3hourly:
+            self._unique_id = f"{self._unique_id}_{MODE_DAILY}"
 
-        self.metoffice_site_id = None
-        self.metoffice_site_name = None
-        self.metoffice_now = None
+        self.use_3hourly = use_3hourly
 
     @property
     def name(self):
@@ -124,22 +141,26 @@ class MetOfficeCurrentSensor(SensorEntity):
         value = None
 
         if self._type == "visibility_distance" and hasattr(
-            self.metoffice_now, "visibility"
+            self.coordinator.data.now, "visibility"
         ):
-            value = VISIBILITY_DISTANCE_CLASSES.get(self.metoffice_now.visibility.value)
+            value = VISIBILITY_DISTANCE_CLASSES.get(
+                self.coordinator.data.now.visibility.value
+            )
 
-        if self._type == "visibility" and hasattr(self.metoffice_now, "visibility"):
-            value = VISIBILITY_CLASSES.get(self.metoffice_now.visibility.value)
+        if self._type == "visibility" and hasattr(
+            self.coordinator.data.now, "visibility"
+        ):
+            value = VISIBILITY_CLASSES.get(self.coordinator.data.now.visibility.value)
 
-        elif self._type == "weather" and hasattr(self.metoffice_now, self._type):
+        elif self._type == "weather" and hasattr(self.coordinator.data.now, self._type):
             value = [
                 k
                 for k, v in CONDITION_CLASSES.items()
-                if self.metoffice_now.weather.value in v
+                if self.coordinator.data.now.weather.value in v
             ][0]
 
-        elif hasattr(self.metoffice_now, self._type):
-            value = getattr(self.metoffice_now, self._type)
+        elif hasattr(self.coordinator.data.now, self._type):
+            value = getattr(self.coordinator.data.now, self._type)
 
             if not isinstance(value, int):
                 value = value.value
@@ -175,44 +196,13 @@ class MetOfficeCurrentSensor(SensorEntity):
         """Return the state attributes of the device."""
         return {
             ATTR_ATTRIBUTION: ATTRIBUTION,
-            ATTR_LAST_UPDATE: self.metoffice_now.date if self.metoffice_now else None,
+            ATTR_LAST_UPDATE: self.coordinator.data.now.date,
             ATTR_SENSOR_ID: self._type,
-            ATTR_SITE_ID: self.metoffice_site_id if self.metoffice_site_id else None,
-            ATTR_SITE_NAME: self.metoffice_site_name
-            if self.metoffice_site_name
-            else None,
+            ATTR_SITE_ID: self.coordinator.data.site.id,
+            ATTR_SITE_NAME: self.coordinator.data.site.name,
         }
 
-    async def async_added_to_hass(self) -> None:
-        """Set up a listener and load data."""
-        self.async_on_remove(
-            self._coordinator.async_add_listener(self._update_callback)
-        )
-        self._update_callback()
-
-    async def async_update(self):
-        """Schedule a custom update via the common entity update service."""
-        await self._coordinator.async_request_refresh()
-
-    @callback
-    def _update_callback(self) -> None:
-        """Load data from integration."""
-        self.metoffice_site_id = self._data.site_id
-        self.metoffice_site_name = self._data.site_name
-        self.metoffice_now = self._data.now
-        self.async_write_ha_state()
-
-    @property
-    def should_poll(self) -> bool:
-        """Entities do not individually poll."""
-        return False
-
     @property
     def entity_registry_enabled_default(self) -> bool:
         """Return if the entity should be enabled when first added to the entity registry."""
-        return SENSOR_TYPES[self._type][4]
-
-    @property
-    def available(self):
-        """Return if state is available."""
-        return self.metoffice_site_id is not None and self.metoffice_now is not None
+        return SENSOR_TYPES[self._type][4] and self.use_3hourly
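The hand-written listener, should_poll and available code removed above is exactly what CoordinatorEntity provides out of the box, which is why the sensor class can drop it once it inherits from CoordinatorEntity. A simplified sketch of that behaviour, assuming Home Assistant is installed; this is illustrative and not Home Assistant's actual source:

from homeassistant.helpers.entity import Entity


class CoordinatorEntitySketch(Entity):
    """Roughly what CoordinatorEntity supplies to subclasses."""

    def __init__(self, coordinator):
        self.coordinator = coordinator

    @property
    def should_poll(self):
        # The coordinator pushes updates; entities never poll individually.
        return False

    @property
    def available(self):
        # Availability tracks whether the last coordinator refresh succeeded.
        return self.coordinator.last_update_success

    async def async_added_to_hass(self):
        # Subscribe to coordinator updates and write state when data changes.
        self.async_on_remove(
            self.coordinator.async_add_listener(self.async_write_ha_state)
        )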
homeassistant/components/metoffice/weather.py
@@ -1,17 +1,30 @@
 """Support for UK Met Office weather service."""
-from homeassistant.components.weather import WeatherEntity
+from homeassistant.components.weather import (
+    ATTR_FORECAST_CONDITION,
+    ATTR_FORECAST_PRECIPITATION,
+    ATTR_FORECAST_TEMP,
+    ATTR_FORECAST_TIME,
+    ATTR_FORECAST_WIND_BEARING,
+    ATTR_FORECAST_WIND_SPEED,
+    WeatherEntity,
+)
 from homeassistant.const import LENGTH_KILOMETERS, TEMP_CELSIUS
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import HomeAssistant
 from homeassistant.helpers.typing import ConfigType
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
 from .const import (
     ATTRIBUTION,
     CONDITION_CLASSES,
     DEFAULT_NAME,
     DOMAIN,
-    METOFFICE_COORDINATOR,
-    METOFFICE_DATA,
+    METOFFICE_COORDINATES,
+    METOFFICE_DAILY_COORDINATOR,
+    METOFFICE_HOURLY_COORDINATOR,
     METOFFICE_NAME,
+    MODE_3HOURLY_LABEL,
+    MODE_DAILY,
+    MODE_DAILY_LABEL,
     VISIBILITY_CLASSES,
     VISIBILITY_DISTANCE_CLASSES,
 )
@@ -25,27 +38,48 @@ async def async_setup_entry(
 
     async_add_entities(
         [
-            MetOfficeWeather(
-                entry.data,
-                hass_data,
-            )
+            MetOfficeWeather(hass_data[METOFFICE_HOURLY_COORDINATOR], hass_data, True),
+            MetOfficeWeather(hass_data[METOFFICE_DAILY_COORDINATOR], hass_data, False),
         ],
         False,
     )
 
 
-class MetOfficeWeather(WeatherEntity):
+def _build_forecast_data(timestep):
+    data = {}
+    data[ATTR_FORECAST_TIME] = timestep.date
+    if timestep.weather:
+        data[ATTR_FORECAST_CONDITION] = _get_weather_condition(timestep.weather.value)
+    if timestep.precipitation:
+        data[ATTR_FORECAST_PRECIPITATION] = timestep.precipitation.value
+    if timestep.temperature:
+        data[ATTR_FORECAST_TEMP] = timestep.temperature.value
+    if timestep.wind_direction:
+        data[ATTR_FORECAST_WIND_BEARING] = timestep.wind_direction.value
+    if timestep.wind_speed:
+        data[ATTR_FORECAST_WIND_SPEED] = timestep.wind_speed.value
+    return data
+
+
+def _get_weather_condition(metoffice_code):
+    for hass_name, metoffice_codes in CONDITION_CLASSES.items():
+        if metoffice_code in metoffice_codes:
+            return hass_name
+    return None
+
+
+class MetOfficeWeather(CoordinatorEntity, WeatherEntity):
     """Implementation of a Met Office weather condition."""
 
-    def __init__(self, entry_data, hass_data):
+    def __init__(self, coordinator, hass_data, use_3hourly):
         """Initialise the platform with a data instance."""
-        self._data = hass_data[METOFFICE_DATA]
-        self._coordinator = hass_data[METOFFICE_COORDINATOR]
+        super().__init__(coordinator)
 
-        self._name = f"{DEFAULT_NAME} {hass_data[METOFFICE_NAME]}"
-        self._unique_id = f"{self._data.latitude}_{self._data.longitude}"
-
-        self.metoffice_now = None
+        mode_label = MODE_3HOURLY_LABEL if use_3hourly else MODE_DAILY_LABEL
+        self._name = f"{DEFAULT_NAME} {hass_data[METOFFICE_NAME]} {mode_label}"
+        self._unique_id = hass_data[METOFFICE_COORDINATES]
+        if not use_3hourly:
+            self._unique_id = f"{self._unique_id}_{MODE_DAILY}"
 
     @property
     def name(self):
@@ -60,24 +94,16 @@ class MetOfficeWeather(WeatherEntity):
     @property
     def condition(self):
         """Return the current condition."""
-        return (
-            [
-                k
-                for k, v in CONDITION_CLASSES.items()
-                if self.metoffice_now.weather.value in v
-            ][0]
-            if self.metoffice_now
-            else None
-        )
+        if self.coordinator.data.now:
+            return _get_weather_condition(self.coordinator.data.now.weather.value)
+        return None
 
     @property
     def temperature(self):
         """Return the platform temperature."""
-        return (
-            self.metoffice_now.temperature.value
-            if self.metoffice_now and self.metoffice_now.temperature
-            else None
-        )
+        if self.coordinator.data.now.temperature:
+            return self.coordinator.data.now.temperature.value
+        return None
 
     @property
     def temperature_unit(self):
@@ -88,8 +114,13 @@ class MetOfficeWeather(WeatherEntity):
     def visibility(self):
         """Return the platform visibility."""
         _visibility = None
-        if hasattr(self.metoffice_now, "visibility"):
-            _visibility = f"{VISIBILITY_CLASSES.get(self.metoffice_now.visibility.value)} - {VISIBILITY_DISTANCE_CLASSES.get(self.metoffice_now.visibility.value)}"
+        weather_now = self.coordinator.data.now
+        if hasattr(weather_now, "visibility"):
+            visibility_class = VISIBILITY_CLASSES.get(weather_now.visibility.value)
+            visibility_distance = VISIBILITY_DISTANCE_CLASSES.get(
+                weather_now.visibility.value
+            )
+            _visibility = f"{visibility_class} - {visibility_distance}"
         return _visibility
 
     @property
@@ -100,63 +131,46 @@ class MetOfficeWeather(WeatherEntity):
     @property
     def pressure(self):
         """Return the mean sea-level pressure."""
-        return (
-            self.metoffice_now.pressure.value
-            if self.metoffice_now and self.metoffice_now.pressure
-            else None
-        )
+        weather_now = self.coordinator.data.now
+        if weather_now and weather_now.pressure:
+            return weather_now.pressure.value
+        return None
 
     @property
     def humidity(self):
         """Return the relative humidity."""
-        return (
-            self.metoffice_now.humidity.value
-            if self.metoffice_now and self.metoffice_now.humidity
-            else None
-        )
+        weather_now = self.coordinator.data.now
+        if weather_now and weather_now.humidity:
+            return weather_now.humidity.value
+        return None
 
     @property
     def wind_speed(self):
         """Return the wind speed."""
-        return (
-            self.metoffice_now.wind_speed.value
-            if self.metoffice_now and self.metoffice_now.wind_speed
-            else None
-        )
+        weather_now = self.coordinator.data.now
+        if weather_now and weather_now.wind_speed:
+            return weather_now.wind_speed.value
+        return None
 
     @property
     def wind_bearing(self):
         """Return the wind bearing."""
-        return (
-            self.metoffice_now.wind_direction.value
-            if self.metoffice_now and self.metoffice_now.wind_direction
-            else None
-        )
+        weather_now = self.coordinator.data.now
+        if weather_now and weather_now.wind_direction:
+            return weather_now.wind_direction.value
+        return None
+
+    @property
+    def forecast(self):
+        """Return the forecast array."""
+        if self.coordinator.data.forecast is None:
+            return None
+        return [
+            _build_forecast_data(timestep)
+            for timestep in self.coordinator.data.forecast
+        ]
 
     @property
     def attribution(self):
         """Return the attribution."""
         return ATTRIBUTION
-
-    async def async_added_to_hass(self) -> None:
-        """Set up a listener and load data."""
-        self.async_on_remove(
-            self._coordinator.async_add_listener(self._update_callback)
-        )
-        self._update_callback()
-
-    @callback
-    def _update_callback(self) -> None:
-        """Load data from integration."""
-        self.metoffice_now = self._data.now
-        self.async_write_ha_state()
-
-    @property
-    def should_poll(self) -> bool:
-        """Entities do not individually poll."""
-        return False
-
-    @property
-    def available(self):
-        """Return if state is available."""
-        return self.metoffice_now is not None
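For each timestep, _build_forecast_data produces a dict keyed by the ATTR_FORECAST_* constants imported above, and the weather entity exposes the list of those dicts as its "forecast" attribute (which is what the tests below assert against). A hedged illustration of one entry; the key names are the real constant values, the sample values are made up:

example_forecast_entry = {
    "datetime": "2020-04-28T21:00:00+00:00",  # ATTR_FORECAST_TIME (a datetime object in the real data)
    "condition": "cloudy",                    # ATTR_FORECAST_CONDITION, mapped via _get_weather_condition
    "precipitation": 9,                       # ATTR_FORECAST_PRECIPITATION
    "temperature": 10,                        # ATTR_FORECAST_TEMP
    "wind_bearing": "NNE",                    # ATTR_FORECAST_WIND_BEARING
    "wind_speed": 4,                          # ATTR_FORECAST_WIND_SPEED
}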
requirements_all.txt
@@ -473,7 +473,7 @@ coronavirus==1.1.1
 datadog==0.15.0
 
 # homeassistant.components.metoffice
-datapoint==0.9.5
+datapoint==0.9.8
 
 # homeassistant.components.debugpy
 debugpy==1.3.0

requirements_test_all.txt
@@ -267,7 +267,7 @@ coronavirus==1.1.1
 datadog==0.15.0
 
 # homeassistant.components.metoffice
-datapoint==0.9.5
+datapoint==0.9.8
 
 # homeassistant.components.debugpy
 debugpy==1.3.0
tests/components/metoffice/test_config_flow.py
@@ -68,6 +68,10 @@ async def test_form_already_configured(hass, requests_mock):
         "/public/data/val/wxfcs/all/json/354107?res=3hourly",
         text="",
     )
+    requests_mock.get(
+        "/public/data/val/wxfcs/all/json/354107?res=daily",
+        text="",
+    )
 
     MockConfigEntry(
         domain=DOMAIN,
tests/components/metoffice/test_sensor.py
@@ -29,12 +29,17 @@ async def test_one_sensor_site_running(hass, requests_mock, legacy_patchable_time):
     mock_json = json.loads(load_fixture("metoffice.json"))
     all_sites = json.dumps(mock_json["all_sites"])
     wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
+    wavertree_daily = json.dumps(mock_json["wavertree_daily"])
 
     requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
     requests_mock.get(
         "/public/data/val/wxfcs/all/json/354107?res=3hourly",
         text=wavertree_hourly,
     )
+    requests_mock.get(
+        "/public/data/val/wxfcs/all/json/354107?res=daily",
+        text=wavertree_daily,
+    )
 
     entry = MockConfigEntry(
         domain=DOMAIN,
@@ -72,15 +77,23 @@ async def test_two_sensor_sites_running(hass, requests_mock, legacy_patchable_time):
     mock_json = json.loads(load_fixture("metoffice.json"))
     all_sites = json.dumps(mock_json["all_sites"])
     wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
+    wavertree_daily = json.dumps(mock_json["wavertree_daily"])
     kingslynn_hourly = json.dumps(mock_json["kingslynn_hourly"])
+    kingslynn_daily = json.dumps(mock_json["kingslynn_daily"])
 
     requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
     requests_mock.get(
         "/public/data/val/wxfcs/all/json/354107?res=3hourly", text=wavertree_hourly
     )
+    requests_mock.get(
+        "/public/data/val/wxfcs/all/json/354107?res=daily", text=wavertree_daily
+    )
     requests_mock.get(
         "/public/data/val/wxfcs/all/json/322380?res=3hourly", text=kingslynn_hourly
    )
+    requests_mock.get(
+        "/public/data/val/wxfcs/all/json/322380?res=daily", text=kingslynn_daily
+    )
 
     entry = MockConfigEntry(
         domain=DOMAIN,
tests/components/metoffice/test_weather.py
@@ -9,6 +9,7 @@ from homeassistant.util import utcnow
 
 from . import NewDateTime
 from .const import (
+    DATETIME_FORMAT,
     METOFFICE_CONFIG_KINGSLYNN,
     METOFFICE_CONFIG_WAVERTREE,
     WAVERTREE_SENSOR_RESULTS,
@@ -26,6 +27,7 @@ async def test_site_cannot_connect(hass, requests_mock, legacy_patchable_time):
 
     requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text="")
     requests_mock.get("/public/data/val/wxfcs/all/json/354107?res=3hourly", text="")
+    requests_mock.get("/public/data/val/wxfcs/all/json/354107?res=daily", text="")
 
     entry = MockConfigEntry(
         domain=DOMAIN,
@@ -35,9 +37,10 @@ async def test_site_cannot_connect(hass, requests_mock, legacy_patchable_time):
     await hass.config_entries.async_setup(entry.entry_id)
     await hass.async_block_till_done()
 
-    assert hass.states.get("weather.met_office_wavertree") is None
+    assert hass.states.get("weather.met_office_wavertree_3hourly") is None
+    assert hass.states.get("weather.met_office_wavertree_daily") is None
     for sensor_id in WAVERTREE_SENSOR_RESULTS:
-        sensor_name, sensor_value = WAVERTREE_SENSOR_RESULTS[sensor_id]
+        sensor_name, _ = WAVERTREE_SENSOR_RESULTS[sensor_id]
         sensor = hass.states.get(f"sensor.wavertree_{sensor_name}")
         assert sensor is None
 
@@ -53,11 +56,15 @@ async def test_site_cannot_update(hass, requests_mock, legacy_patchable_time):
     mock_json = json.loads(load_fixture("metoffice.json"))
     all_sites = json.dumps(mock_json["all_sites"])
     wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
+    wavertree_daily = json.dumps(mock_json["wavertree_daily"])
 
     requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
     requests_mock.get(
         "/public/data/val/wxfcs/all/json/354107?res=3hourly", text=wavertree_hourly
     )
+    requests_mock.get(
+        "/public/data/val/wxfcs/all/json/354107?res=daily", text=wavertree_daily
+    )
 
     entry = MockConfigEntry(
         domain=DOMAIN,
@@ -67,16 +74,23 @@ async def test_site_cannot_update(hass, requests_mock, legacy_patchable_time):
     await hass.config_entries.async_setup(entry.entry_id)
     await hass.async_block_till_done()
 
-    entity = hass.states.get("weather.met_office_wavertree")
+    entity = hass.states.get("weather.met_office_wavertree_3_hourly")
+    assert entity
+
+    entity = hass.states.get("weather.met_office_wavertree_daily")
     assert entity
 
     requests_mock.get("/public/data/val/wxfcs/all/json/354107?res=3hourly", text="")
+    requests_mock.get("/public/data/val/wxfcs/all/json/354107?res=daily", text="")
 
     future_time = utcnow() + timedelta(minutes=20)
     async_fire_time_changed(hass, future_time)
     await hass.async_block_till_done()
 
-    entity = hass.states.get("weather.met_office_wavertree")
+    entity = hass.states.get("weather.met_office_wavertree_3_hourly")
+    assert entity.state == STATE_UNAVAILABLE
+
+    entity = hass.states.get("weather.met_office_wavertree_daily")
     assert entity.state == STATE_UNAVAILABLE
 
 
@@ -91,12 +105,17 @@ async def test_one_weather_site_running(hass, requests_mock, legacy_patchable_time):
     mock_json = json.loads(load_fixture("metoffice.json"))
     all_sites = json.dumps(mock_json["all_sites"])
     wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
+    wavertree_daily = json.dumps(mock_json["wavertree_daily"])
 
     requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
     requests_mock.get(
         "/public/data/val/wxfcs/all/json/354107?res=3hourly",
         text=wavertree_hourly,
     )
+    requests_mock.get(
+        "/public/data/val/wxfcs/all/json/354107?res=daily",
+        text=wavertree_daily,
+    )
 
     entry = MockConfigEntry(
         domain=DOMAIN,
@@ -106,8 +125,8 @@ async def test_one_weather_site_running(hass, requests_mock, legacy_patchable_time):
     await hass.config_entries.async_setup(entry.entry_id)
     await hass.async_block_till_done()
 
-    # Wavertree weather platform expected results
-    entity = hass.states.get("weather.met_office_wavertree")
+    # Wavertree 3-hourly weather platform expected results
+    entity = hass.states.get("weather.met_office_wavertree_3_hourly")
     assert entity
 
     assert entity.state == "sunny"
@@ -117,6 +136,41 @@ async def test_one_weather_site_running(hass, requests_mock, legacy_patchable_time):
     assert entity.attributes.get("visibility") == "Good - 10-20"
     assert entity.attributes.get("humidity") == 50
 
+    # Forecasts added - just pick out 1 entry to check
+    assert len(entity.attributes.get("forecast")) == 35
+
+    assert (
+        entity.attributes.get("forecast")[26]["datetime"].strftime(DATETIME_FORMAT)
+        == "2020-04-28 21:00:00+0000"
+    )
+    assert entity.attributes.get("forecast")[26]["condition"] == "cloudy"
+    assert entity.attributes.get("forecast")[26]["temperature"] == 10
+    assert entity.attributes.get("forecast")[26]["wind_speed"] == 4
+    assert entity.attributes.get("forecast")[26]["wind_bearing"] == "NNE"
+
+    # Wavertree daily weather platform expected results
+    entity = hass.states.get("weather.met_office_wavertree_daily")
+    assert entity
+
+    assert entity.state == "sunny"
+    assert entity.attributes.get("temperature") == 19
+    assert entity.attributes.get("wind_speed") == 9
+    assert entity.attributes.get("wind_bearing") == "SSE"
+    assert entity.attributes.get("visibility") == "Good - 10-20"
+    assert entity.attributes.get("humidity") == 50
+
+    # Also has Forecasts added - again, just pick out 1 entry to check
+    assert len(entity.attributes.get("forecast")) == 8
+
+    assert (
+        entity.attributes.get("forecast")[7]["datetime"].strftime(DATETIME_FORMAT)
+        == "2020-04-29 12:00:00+0000"
+    )
+    assert entity.attributes.get("forecast")[7]["condition"] == "rainy"
+    assert entity.attributes.get("forecast")[7]["temperature"] == 13
+    assert entity.attributes.get("forecast")[7]["wind_speed"] == 13
+    assert entity.attributes.get("forecast")[7]["wind_bearing"] == "SE"
+
 
 @patch(
     "datapoint.Forecast.datetime.datetime",
@@ -129,15 +183,23 @@ async def test_two_weather_sites_running(hass, requests_mock, legacy_patchable_time):
     mock_json = json.loads(load_fixture("metoffice.json"))
     all_sites = json.dumps(mock_json["all_sites"])
     wavertree_hourly = json.dumps(mock_json["wavertree_hourly"])
+    wavertree_daily = json.dumps(mock_json["wavertree_daily"])
     kingslynn_hourly = json.dumps(mock_json["kingslynn_hourly"])
+    kingslynn_daily = json.dumps(mock_json["kingslynn_daily"])
 
     requests_mock.get("/public/data/val/wxfcs/all/json/sitelist/", text=all_sites)
     requests_mock.get(
         "/public/data/val/wxfcs/all/json/354107?res=3hourly", text=wavertree_hourly
     )
+    requests_mock.get(
+        "/public/data/val/wxfcs/all/json/354107?res=daily", text=wavertree_daily
+    )
     requests_mock.get(
         "/public/data/val/wxfcs/all/json/322380?res=3hourly", text=kingslynn_hourly
     )
+    requests_mock.get(
+        "/public/data/val/wxfcs/all/json/322380?res=daily", text=kingslynn_daily
+    )
 
     entry = MockConfigEntry(
         domain=DOMAIN,
@@ -153,8 +215,8 @@ async def test_two_weather_sites_running(hass, requests_mock, legacy_patchable_time):
     await hass.config_entries.async_setup(entry2.entry_id)
     await hass.async_block_till_done()
 
-    # Wavertree weather platform expected results
-    entity = hass.states.get("weather.met_office_wavertree")
+    # Wavertree 3-hourly weather platform expected results
+    entity = hass.states.get("weather.met_office_wavertree_3_hourly")
     assert entity
 
     assert entity.state == "sunny"
@@ -164,8 +226,43 @@ async def test_two_weather_sites_running(hass, requests_mock, legacy_patchable_time):
     assert entity.attributes.get("visibility") == "Good - 10-20"
     assert entity.attributes.get("humidity") == 50
 
-    # King's Lynn weather platform expected results
-    entity = hass.states.get("weather.met_office_king_s_lynn")
+    # Forecasts added - just pick out 1 entry to check
+    assert len(entity.attributes.get("forecast")) == 35
+
+    assert (
+        entity.attributes.get("forecast")[18]["datetime"].strftime(DATETIME_FORMAT)
+        == "2020-04-27 21:00:00+0000"
+    )
+    assert entity.attributes.get("forecast")[18]["condition"] == "sunny"
+    assert entity.attributes.get("forecast")[18]["temperature"] == 9
+    assert entity.attributes.get("forecast")[18]["wind_speed"] == 4
+    assert entity.attributes.get("forecast")[18]["wind_bearing"] == "NW"
+
+    # Wavertree daily weather platform expected results
+    entity = hass.states.get("weather.met_office_wavertree_daily")
+    assert entity
+
+    assert entity.state == "sunny"
+    assert entity.attributes.get("temperature") == 19
+    assert entity.attributes.get("wind_speed") == 9
+    assert entity.attributes.get("wind_bearing") == "SSE"
+    assert entity.attributes.get("visibility") == "Good - 10-20"
+    assert entity.attributes.get("humidity") == 50
+
+    # Also has Forecasts added - again, just pick out 1 entry to check
+    assert len(entity.attributes.get("forecast")) == 8
+
+    assert (
+        entity.attributes.get("forecast")[7]["datetime"].strftime(DATETIME_FORMAT)
+        == "2020-04-29 12:00:00+0000"
+    )
+    assert entity.attributes.get("forecast")[7]["condition"] == "rainy"
+    assert entity.attributes.get("forecast")[7]["temperature"] == 13
+    assert entity.attributes.get("forecast")[7]["wind_speed"] == 13
+    assert entity.attributes.get("forecast")[7]["wind_bearing"] == "SE"
+
+    # King's Lynn 3-hourly weather platform expected results
+    entity = hass.states.get("weather.met_office_king_s_lynn_3_hourly")
     assert entity
 
     assert entity.state == "sunny"
@@ -174,3 +271,38 @@ async def test_two_weather_sites_running(hass, requests_mock, legacy_patchable_time):
     assert entity.attributes.get("wind_bearing") == "E"
     assert entity.attributes.get("visibility") == "Very Good - 20-40"
     assert entity.attributes.get("humidity") == 60
+
+    # Also has Forecast added - just pick out 1 entry to check
+    assert len(entity.attributes.get("forecast")) == 35
+
+    assert (
+        entity.attributes.get("forecast")[18]["datetime"].strftime(DATETIME_FORMAT)
+        == "2020-04-27 21:00:00+0000"
+    )
+    assert entity.attributes.get("forecast")[18]["condition"] == "cloudy"
+    assert entity.attributes.get("forecast")[18]["temperature"] == 10
+    assert entity.attributes.get("forecast")[18]["wind_speed"] == 7
+    assert entity.attributes.get("forecast")[18]["wind_bearing"] == "SE"
+
+    # King's Lynn daily weather platform expected results
+    entity = hass.states.get("weather.met_office_king_s_lynn_daily")
+    assert entity
+
+    assert entity.state == "cloudy"
+    assert entity.attributes.get("temperature") == 9
+    assert entity.attributes.get("wind_speed") == 4
+    assert entity.attributes.get("wind_bearing") == "ESE"
+    assert entity.attributes.get("visibility") == "Very Good - 20-40"
+    assert entity.attributes.get("humidity") == 75
+
+    # All should have Forecast added - again, just picking out 1 entry to check
+    assert len(entity.attributes.get("forecast")) == 8
+
+    assert (
+        entity.attributes.get("forecast")[5]["datetime"].strftime(DATETIME_FORMAT)
+        == "2020-04-28 12:00:00+0000"
+    )
+    assert entity.attributes.get("forecast")[5]["condition"] == "cloudy"
+    assert entity.attributes.get("forecast")[5]["temperature"] == 11
+    assert entity.attributes.get("forecast")[5]["wind_speed"] == 7
+    assert entity.attributes.get("forecast")[5]["wind_bearing"] == "ESE"
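A pattern repeated throughout the test changes above is that every test must now register the ?res=daily endpoint alongside ?res=3hourly, otherwise the daily coordinator's first refresh fails during setup. A hedged sketch of that mocking, using the fixture keys the tests rely on; the helper name _register_wavertree is illustrative only:

import json

from tests.common import load_fixture  # Home Assistant test helper


def _register_wavertree(requests_mock):
    """Register both the 3-hourly and the daily Datapoint endpoints for Wavertree."""
    mock_json = json.loads(load_fixture("metoffice.json"))
    requests_mock.get(
        "/public/data/val/wxfcs/all/json/sitelist/",
        text=json.dumps(mock_json["all_sites"]),
    )
    requests_mock.get(
        "/public/data/val/wxfcs/all/json/354107?res=3hourly",
        text=json.dumps(mock_json["wavertree_hourly"]),
    )
    requests_mock.get(
        "/public/data/val/wxfcs/all/json/354107?res=daily",
        text=json.dumps(mock_json["wavertree_daily"]),
    )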
tests/fixtures/metoffice.json (vendored, 254 lines added)
@@ -218,6 +218,7 @@
                                     "U": "0",
                                     "$": "180"
                                 },
+
                                 {
                                     "D": "NW",
                                     "F": "10",
@@ -1495,5 +1496,258 @@
             }
         }
     }
+    },
+    "kingslynn_daily": {
+        "SiteRep": {
+            "Wx": {
+                "Param": [
+                    {
+                        "name": "FDm",
+                        "units": "C",
+                        "$": "Feels Like Day Maximum Temperature"
+                    },
+                    {
+                        "name": "FNm",
+                        "units": "C",
+                        "$": "Feels Like Night Minimum Temperature"
+                    },
+                    {
+                        "name": "Dm",
+                        "units": "C",
+                        "$": "Day Maximum Temperature"
+                    },
+                    {
+                        "name": "Nm",
+                        "units": "C",
+                        "$": "Night Minimum Temperature"
+                    },
+                    {
+                        "name": "Gn",
+                        "units": "mph",
+                        "$": "Wind Gust Noon"
+                    },
+                    {
+                        "name": "Gm",
+                        "units": "mph",
+                        "$": "Wind Gust Midnight"
+                    },
+                    {
+                        "name": "Hn",
+                        "units": "%",
+                        "$": "Screen Relative Humidity Noon"
+                    },
+                    {
+                        "name": "Hm",
+                        "units": "%",
+                        "$": "Screen Relative Humidity Midnight"
+                    },
+                    {
+                        "name": "V",
+                        "units": "",
+                        "$": "Visibility"
+                    },
+                    {
+                        "name": "D",
+                        "units": "compass",
+                        "$": "Wind Direction"
+                    },
+                    {
+                        "name": "S",
+                        "units": "mph",
+                        "$": "Wind Speed"
+                    },
+                    {
+                        "name": "U",
+                        "units": "",
+                        "$": "Max UV Index"
+                    },
+                    {
+                        "name": "W",
+                        "units": "",
+                        "$": "Weather Type"
+                    },
+                    {
+                        "name": "PPd",
+                        "units": "%",
+                        "$": "Precipitation Probability Day"
+                    },
+                    {
+                        "name": "PPn",
+                        "units": "%",
+                        "$": "Precipitation Probability Night"
+                    }
+                ]
+            },
+            "DV": {
+                "dataDate": "2020-04-25T08:00:00Z",
+                "type": "Forecast",
+                "Location": {
+                    "i": "322380",
+                    "lat": "52.7561",
+                    "lon": "0.4019",
+                    "name": "KING'S LYNN",
+                    "country": "ENGLAND",
+                    "continent": "EUROPE",
+                    "elevation": "5.0",
+                    "Period": [
+                        {
+                            "type": "Day",
+                            "value": "2020-04-25Z",
+                            "Rep": [
+                                {
+                                    "D": "ESE",
+                                    "Gn": "4",
+                                    "Hn": "75",
+                                    "PPd": "9",
+                                    "S": "4",
+                                    "V": "VG",
+                                    "Dm": "9",
+                                    "FDm": "8",
+                                    "W": "8",
+                                    "U": "3",
+                                    "$": "Day"
+                                },
+                                {
+                                    "D": "SSE",
+                                    "Gm": "16",
+                                    "Hm": "84",
+                                    "PPn": "0",
+                                    "S": "7",
+                                    "V": "VG",
+                                    "Nm": "7",
+                                    "FNm": "5",
+                                    "W": "0",
+                                    "$": "Night"
+                                }
+                            ]
+                        },
+                        {
+                            "type": "Day",
+                            "value": "2020-04-26Z",
+                            "Rep": [
+                                {
+                                    "D": "SSW",
+                                    "Gn": "13",
+                                    "Hn": "69",
+                                    "PPd": "0",
+                                    "S": "9",
+                                    "V": "VG",
+                                    "Dm": "13",
+                                    "FDm": "11",
+                                    "W": "1",
+                                    "U": "4",
+                                    "$": "Day"
+                                },
+                                {
+                                    "D": "SSW",
+                                    "Gm": "13",
+                                    "Hm": "75",
+                                    "PPn": "5",
+                                    "S": "7",
+                                    "V": "GO",
+                                    "Nm": "11",
+                                    "FNm": "10",
+                                    "W": "7",
+                                    "$": "Night"
+                                }
+                            ]
+                        },
+                        {
+                            "type": "Day",
+                            "value": "2020-04-27Z",
+                            "Rep": [
+                                {
+                                    "D": "NW",
+                                    "Gn": "11",
+                                    "Hn": "78",
+                                    "PPd": "36",
+                                    "S": "4",
+                                    "V": "VG",
+                                    "Dm": "10",
+                                    "FDm": "9",
+                                    "W": "7",
+                                    "U": "3",
+                                    "$": "Day"
+                                },
+                                {
+                                    "D": "SE",
+                                    "Gm": "13",
+                                    "Hm": "85",
+                                    "PPn": "9",
+                                    "S": "7",
+                                    "V": "VG",
+                                    "Nm": "9",
+                                    "FNm": "7",
+                                    "W": "7",
+                                    "$": "Night"
+                                }
+                            ]
+                        },
+                        {
+                            "type": "Day",
+                            "value": "2020-04-28Z",
+                            "Rep": [
+                                {
+                                    "D": "ESE",
+                                    "Gn": "13",
+                                    "Hn": "77",
+                                    "PPd": "14",
+                                    "S": "7",
+                                    "V": "GO",
+                                    "Dm": "11",
+                                    "FDm": "9",
+                                    "W": "7",
+                                    "U": "3",
+                                    "$": "Day"
+                                },
+                                {
+                                    "D": "SSE",
+                                    "Gm": "13",
+                                    "Hm": "87",
+                                    "PPn": "11",
+                                    "S": "7",
+                                    "V": "GO",
+                                    "Nm": "9",
+                                    "FNm": "7",
+                                    "W": "7",
+                                    "$": "Night"
+                                }
+                            ]
+                        },
+                        {
+                            "type": "Day",
+                            "value": "2020-04-29Z",
+                            "Rep": [
+                                {
+                                    "D": "SSE",
+                                    "Gn": "20",
+                                    "Hn": "75",
+                                    "PPd": "8",
+                                    "S": "11",
+                                    "V": "VG",
+                                    "Dm": "12",
+                                    "FDm": "10",
+                                    "W": "7",
+                                    "U": "3",
+                                    "$": "Day"
+                                },
+                                {
+                                    "D": "SSE",
+                                    "Gm": "20",
+                                    "Hm": "86",
+                                    "PPn": "20",
+                                    "S": "11",
+                                    "V": "VG",
+                                    "Nm": "9",
+                                    "FNm": "7",
+                                    "W": "7",
+                                    "$": "Night"
+                                }
+                            ]
+                        }
+                    ]
+                }
+            }
+        }
     }
 }