1
0
mirror of https://github.com/home-assistant/core.git synced 2025-08-21 19:30:02 +00:00
Files
.devcontainer
.github
.vscode
docs
homeassistant
machine
pylint
rootfs
script
tests
auth
backports
components
abode
accuweather
acmeda
adax
adguard
advantage_air
aemet
agent_dvr
air_quality
airly
airnow
airq
airthings
airthings_ble
airtouch4
airvisual
airvisual_pro
airzone
aladdin_connect
alarm_control_panel
alarmdecoder
alert
alexa
amberelectric
ambiclimate
ambient_station
analytics
android_ip_webcam
androidtv
anthemav
apache_kafka
apcupsd
api
api_streams
apple_tv
application_credentials
apprise
aprs
aranet
arcam_fmj
aseko_pool_live
asuswrt
atag
august
aurora
aurora_abb_powerone
aussie_broadband
auth
automation
awair
aws
axis
azure_devops
azure_event_hub
backup
baf
balboa
bayesian
binary_sensor
blackbird
blebox
blink
bluemaestro
blueprint
bluetooth
bluetooth_adapters
bluetooth_le_tracker
bmw_connected_drive
bond
bosch_shc
braviatv
broadlink
brother
brunt
bsblan
bthome
buienradar
button
caldav
calendar
camera
canary
cast
cert_expiry
clicksend_tts
climate
cloud
cloudflare
co2signal
coinbase
color_extractor
comfoconnect
command_line
compensation
config
configurator
control4
conversation
coolmaster
coronavirus
counter
cover
cpuspeed
crownstone
daikin
darksky
datadog
debugpy
deconz
default_config
deluge
demo
denonavr
derivative
device_automation
device_sun_light_trigger
device_tracker
devolo_home_control
devolo_home_network
dexcom
dhcp
diagnostics
dialogflow
directv
discord
discovery
dlink
dlna_dmr
dlna_dms
dnsip
doorbird
dsmr
dsmr_reader
dte_energy_bridge
duckdns
dunehd
dynalite
eafm
easyenergy
ecobee
econet
ecowitt
efergy
eight_sleep
elgato
elkm1
elmax
emonitor
emulated_hue
emulated_kasa
emulated_roku
energy
energyzero
enocean
enphase_envoy
environment_canada
epson
escea
esphome
eufylife_ble
everlights
evil_genius_labs
ezviz
faa_delays
facebook
facebox
fail2ban
fan
feedreader
ffmpeg
fibaro
fido
file
file_upload
filesize
filter
fireservicerota
firmata
fivem
fjaraskupan
flic
flick_electric
flipr
flo
flume
flux
flux_led
folder
folder_watcher
foobot
forecast_solar
forked_daapd
foscam
freebox
freedns
freedompro
fritz
fritzbox
fritzbox_callmonitor
fronius
frontend
fully_kiosk
garages_amsterdam
gdacs
generic
generic_hygrostat
generic_thermostat
geo_json_events
geo_location
geo_rss_events
geocaching
geofency
geonetnz_quakes
geonetnz_volcano
gios
github
glances
goalzero
gogogate2
goodwe
google
google_assistant
google_assistant_sdk
google_domains
google_mail
google_pubsub
google_sheets
google_translate
google_travel_time
google_wifi
govee_ble
gpslogger
graphite
gree
greeneye_monitor
group
growatt_server
guardian
habitica
hardkernel
hardware
harmony
hassio
hddtemp
hdmi_cec
heos
here_travel_time
hisense_aehw4a1
history
history_stats
hive
hlk_sw16
home_connect
home_plus_control
homeassistant
homeassistant_alerts
homeassistant_hardware
homeassistant_sky_connect
homeassistant_yellow
homekit
homekit_controller
homematic
homematicip_cloud
homewizard
honeywell
html5
http
huawei_lte
hue
huisbaasje
humidifier
hunterdouglas_powerview
hvv_departures
hyperion
ialarm
iaqualink
ibeacon
icloud
ifttt
ign_sismologia
image_processing
image_upload
imap
imap_email_content
influxdb
__init__.py
test_init.py
test_sensor.py
inkbird
input_boolean
input_button
input_datetime
input_number
input_select
input_text
insteon
integration
intellifire
intent
intent_script
ios
iotawatt
ipma
ipp
iqvia
islamic_prayer_times
iss
isy994
izone
jellyfin
jewish_calendar
juicenet
justnimbus
kaleidescape
keenetic_ndms2
kegtron
keymitt_ble
kira
kitchen_sink
kmtronic
knx
kodi
konnected
kostal_plenticore
kraken
kulersky
lacrosse_view
lametric
landisgyr_heat_meter
lastfm
launch_library
laundrify
lcn
ld2410_ble
led_ble
lg_soundbar
lidarr
life360
lifx
light
litejet
litterrobot
livisi
local_calendar
local_file
local_ip
locative
lock
logbook
logentries
logger
logi_circle
london_air
lookin
lovelace
luftdaten
lutron_caseta
lyric
mailbox
mailgun
manual
manual_mqtt
marytts
matter
maxcube
mazda
meater
media_player
media_source
melcloud
melissa
melnor
meraki
met
met_eireann
meteo_france
meteoclimatic
metoffice
mfi
microsoft_face
microsoft_face_detect
microsoft_face_identify
mikrotik
mill
min_max
minecraft_server
minio
mjpeg
moat
mobile_app
mochad
modbus
modem_callerid
modern_forms
moehlenhoff_alpha2
mold_indicator
monoprice
moon
mopeka
motion_blinds
motioneye
mqtt
mqtt_eventstream
mqtt_json
mqtt_room
mqtt_statestream
mullvad
mutesync
my
myq
mysensors
mythicbeastsdns
nam
namecheapdns
nanoleaf
neato
ness_alarm
nest
netatmo
netgear
network
nexia
nextbus
nextdns
nfandroidtv
nibe_heatpump
nightscout
nina
nmap_tracker
no_ip
nobo_hub
notify
notify_events
notion
nsw_fuel_station
nsw_rural_fire_service_feed
nuheat
nuki
numato
number
nut
nws
nx584
nzbget
octoprint
omnilogic
onboarding
oncue
ondilo_ico
onewire
onvif
open_meteo
openai_conversation
openalpr_cloud
openerz
openexchangerates
opengarage
openhardwaremonitor
opentherm_gw
openuv
openweathermap
opnsense
oralb
otbr
overkiz
ovo_energy
owntracks
p1_monitor
panasonic_viera
panel_custom
panel_iframe
peco
persistent_notification
person
philips_js
pi_hole
picnic
pilight
ping
plaato
plant
plex
plugwise
plum_lightpad
point
poolsense
powerwall
profiler
progettihwsw
prometheus
prosegur
proximity
prusalink
ps4
pure_energie
purpleair
push
pushbullet
pushover
pvoutput
pvpc_hourly_pricing
python_script
qingping
qld_bushfire
qnap_qsw
qwikswitch
rachio
radarr
radio_browser
radiotherm
rainbird
rainforest_eagle
rainmachine
random
raspberry_pi
rdw
recollect_waste
recorder
reddit
remember_the_milk
remote
renault
reolink
repairs
rest
rest_command
rflink
rfxtrx
rhasspy
ridwell
ring
risco
rituals_perfume_genie
rmvtransport
roku
roomba
roon
rpi_power
rss_feed_template
rtsp_to_webrtc
ruckus_unleashed
ruuvi_gateway
ruuvitag_ble
rympro
sabnzbd
safe_mode
samsungtv
scene
schedule
scrape
screenlogic
script
search
season
select
sense
senseme
sensibo
sensirion_ble
sensor
sensorpro
sensorpush
sentry
senz
seventeentrack
sfr_box
sharkiq
shell_command
shelly
shopping_list
sia
sigfox
sighthound
signal_messenger
simplepush
simplisafe
simulated
siren
skybell
slack
sleepiq
slimproto
sma
smappee
smart_meter_texas
smartthings
smarttub
smhi
smtp
snips
snmp
snooz
solaredge
solarlog
solax
soma
somfy_mylink
sonarr
songpal
sonos
soundtouch
spaceapi
spc
speedtestdotnet
spider
spotify
sql
squeezebox
srp_energy
ssdp
starline
starlink
startca
statistics
statsd
steam_online
steamist
stookalert
stookwijzer
stream
stt
subaru
sun
surepetcare
switch
switch_as_x
switchbee
switchbot
switcher_kis
syncthing
syncthru
synology_dsm
system_bridge
system_health
system_log
tado
tag
tailscale
tankerkoenig
tasmota
tautulli
tcp
telegram
telegram_bot
tellduslive
template
tesla_wall_connector
text
thermobeacon
thermopro
thread
threshold
tibber
tile
tilt_ble
time_date
timer
tod
todoist
tolo
tomato
tomorrowio
toon
totalconnect
tplink
tplink_omada
traccar
trace
tractive
tradfri
trafikverket_ferry
trafikverket_train
trafikverket_weatherstation
transmission
transport_nsw
trend
tts
tuya
twentemilieu
twilio
twinkly
twitch
uk_transport
ukraine_alarm
unifi
unifi_direct
unifiprotect
universal
upb
upcloud
update
upnp
uptime
uptimerobot
usb
usgs_earthquakes_feed
utility_meter
uvc
vacuum
vallox
velbus
venstar
vera
verisure
version
vesync
vicare
vilfo
vizio
vlc_telnet
voicerss
volumio
volvooncall
vulcan
vultr
wake_on_lan
wallbox
water_heater
watttime
waze_travel_time
weather
webhook
webostv
websocket_api
wemo
whirlpool
whois
wiffi
wilight
withings
wiz
wled
wolflink
workday
worldclock
ws66i
wsdot
xbox
xiaomi
xiaomi_aqara
xiaomi_ble
xiaomi_miio
yale_smart_alarm
yalexs_ble
yamaha
yamaha_musiccast
yandex_transport
yandextts
yeelight
yolink
youless
zamg
zeroconf
zerproc
zeversolar
zha
zodiac
zone
zwave_js
zwave_me
__init__.py
conftest.py
fixtures
hassfest
helpers
pylint
resources
scripts
test_util
testing_config
util
__init__.py
bandit.yaml
common.py
conftest.py
ignore_uncaught_exceptions.py
ruff.toml
test_bootstrap.py
test_config.py
test_config_entries.py
test_core.py
test_data_entry_flow.py
test_exceptions.py
test_loader.py
test_main.py
test_requirements.py
test_runner.py
test_setup.py
test_test_fixtures.py
typing.py
.core_files.yaml
.coveragerc
.dockerignore
.gitattributes
.gitignore
.hadolint.yaml
.pre-commit-config.yaml
.prettierignore
.readthedocs.yml
.strict-typing
.yamllint
CLA.md
CODEOWNERS
CODE_OF_CONDUCT.md
CONTRIBUTING.md
Dockerfile
Dockerfile.dev
LICENSE.md
MANIFEST.in
README.rst
build.yaml
codecov.yml
mypy.ini
pyproject.toml
requirements.txt
requirements_all.txt
requirements_docs.txt
requirements_test.txt
requirements_test_all.txt
requirements_test_pre_commit.txt
setup.cfg
typescript
core/tests/components/influxdb/test_sensor.py
2023-01-10 16:41:16 +01:00

631 lines
18 KiB
Python

"""The tests for the InfluxDB sensor."""
from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta
from http import HTTPStatus
from unittest.mock import MagicMock, patch
from influxdb.exceptions import InfluxDBClientError, InfluxDBServerError
from influxdb_client.rest import ApiException
import pytest
from voluptuous import Invalid
from homeassistant.components.influxdb.const import (
API_VERSION_2,
DEFAULT_API_VERSION,
DEFAULT_BUCKET,
DEFAULT_DATABASE,
DOMAIN,
TEST_QUERY_V1,
TEST_QUERY_V2,
)
from homeassistant.components.influxdb.sensor import PLATFORM_SCHEMA
import homeassistant.components.sensor as sensor
from homeassistant.const import STATE_UNKNOWN
from homeassistant.helpers.entity_platform import PLATFORM_NOT_READY_BASE_WAIT_TIME
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
from tests.common import async_fire_time_changed
# Patch targets for the influxdb integration and its two client classes.
INFLUXDB_PATH = "homeassistant.components.influxdb"
INFLUXDB_CLIENT_PATH = f"{INFLUXDB_PATH}.InfluxDBClient"
INFLUXDB_SENSOR_PATH = f"{INFLUXDB_PATH}.sensor"
# Minimal component configs per API version; V1 works with all defaults,
# V2 requires api_version/organization/token.
BASE_V1_CONFIG = {}
BASE_V2_CONFIG = {
    "api_version": API_VERSION_2,
    "organization": "org",
    "token": "token",
}
# Minimal query definitions; both produce a single entity "sensor.test".
BASE_V1_QUERY = {
    "queries": [
        {
            "name": "test",
            "unique_id": "unique_test_id",
            "measurement": "measurement",
            "where": "where",
            "field": "field",
        }
    ],
}
BASE_V2_QUERY = {
    "queries_flux": [
        {
            "name": "test",
            "unique_id": "unique_test_id",
            "query": "query",
        }
    ]
}
@dataclass
class Record:
    """Record in a Table."""
    # Raw column values for this record, e.g. {"_value": 42} or {"name": "db"}.
    values: dict
@dataclass
class Table:
    """Table in an Influx 2 resultset."""

    # A table holds Record *instances*, not the Record class itself, so the
    # correct annotation is list[Record] (was list[type[Record]]).
    records: list[Record]
@pytest.fixture(name="mock_client")
def mock_client_fixture(request):
    """Patch the InfluxDBClient object with mock for version under test."""
    # The V2 client class lives under a "V2"-suffixed patch target.
    target = (
        f"{INFLUXDB_CLIENT_PATH}V2"
        if request.param == API_VERSION_2
        else INFLUXDB_CLIENT_PATH
    )
    with patch(target) as client:
        yield client
@pytest.fixture(autouse=True, scope="module")
def mock_client_close():
    """Mock close method of clients at module scope."""
    # Patch both V1 and V2 close methods so teardown never touches a server.
    with patch(f"{INFLUXDB_CLIENT_PATH}.close") as close_v1:
        with patch(f"{INFLUXDB_CLIENT_PATH}V2.close") as close_v2:
            yield (close_v1, close_v2)
def _make_v1_resultset(*args):
"""Create a mock V1 resultset."""
for arg in args:
yield {"value": arg}
def _make_v1_databases_resultset():
    """Yield a mock V1 'show databases' resultset (default database plus one)."""
    yield from ({"name": name} for name in (DEFAULT_DATABASE, "db2"))
def _make_v2_resultset(*args):
    """Create a mock V2 resultset: one single-record Table per argument."""
    return [Table([Record({"_value": value})]) for value in args]
def _make_v2_buckets_resultset():
    """Create a mock V2 'buckets()' resultset (default bucket plus one extra)."""
    bucket_records = [Record({"name": name}) for name in (DEFAULT_BUCKET, "bucket2")]
    return [Table(bucket_records)]
def _set_query_mock_v1(
mock_influx_client, return_value=None, query_exception=None, side_effect=None
):
"""Set return value or side effect for the V1 client."""
query_api = mock_influx_client.return_value.query
if side_effect:
query_api.side_effect = side_effect
else:
if return_value is None:
return_value = []
def get_return_value(query, **kwargs):
"""Return mock for test query, return value otherwise."""
if query == TEST_QUERY_V1:
points = _make_v1_databases_resultset()
else:
if query_exception:
raise query_exception
points = return_value
query_output = MagicMock()
query_output.get_points.return_value = points
return query_output
query_api.side_effect = get_return_value
return query_api
def _set_query_mock_v2(
mock_influx_client, return_value=None, query_exception=None, side_effect=None
):
"""Set return value or side effect for the V2 client."""
query_api = mock_influx_client.return_value.query_api.return_value.query
if side_effect:
query_api.side_effect = side_effect
else:
if return_value is None:
return_value = []
def get_return_value(query):
"""Return buckets list for test query, return value otherwise."""
if query == TEST_QUERY_V2:
return _make_v2_buckets_resultset()
if query_exception:
raise query_exception
return return_value
query_api.side_effect = get_return_value
return query_api
async def _setup(hass, config_ext, queries, expected_sensors):
    """Create client and test expected sensors.

    Sets up the influxdb component together with an influxdb sensor platform
    built from ``config_ext`` merged with ``queries``, then asserts that every
    entity id in ``expected_sensors`` exists and returns their State objects.
    """
    config = {
        DOMAIN: config_ext,
        sensor.DOMAIN: {"platform": DOMAIN},
    }
    influx_config = config[sensor.DOMAIN]
    # The platform entry repeats the connection settings and adds the query
    # definitions under test; mutation order matters (queries may override).
    influx_config.update(config_ext)
    influx_config.update(queries)
    assert await async_setup_component(hass, sensor.DOMAIN, config)
    await hass.async_block_till_done()
    sensors = []
    for expected_sensor in expected_sensors:
        state = hass.states.get(expected_sensor)
        assert state is not None
        sensors.append(state)
    return sensors
@pytest.mark.parametrize(
    "mock_client, config_ext, queries, set_query_mock",
    [
        (DEFAULT_API_VERSION, BASE_V1_CONFIG, BASE_V1_QUERY, _set_query_mock_v1),
        (API_VERSION_2, BASE_V2_CONFIG, BASE_V2_QUERY, _set_query_mock_v2),
    ],
    indirect=["mock_client"],
)
async def test_minimal_config(hass, mock_client, config_ext, queries, set_query_mock):
    """Test that the minimal config and defaults set up the sensor for both API versions."""
    set_query_mock(mock_client)
    await _setup(hass, config_ext, queries, ["sensor.test"])
@pytest.mark.parametrize(
    "mock_client, config_ext, queries, set_query_mock",
    [
        # V1: every supported connection and query option populated.
        (
            DEFAULT_API_VERSION,
            {
                "ssl": "true",
                "host": "host",
                "port": "9000",
                "path": "path",
                "username": "user",
                "password": "pass",
                "database": "db",
                "verify_ssl": "true",
            },
            {
                "queries": [
                    {
                        "name": "test",
                        "unique_id": "unique_test_id",
                        "unit_of_measurement": "unit",
                        "measurement": "measurement",
                        "where": "where",
                        "value_template": "123",
                        "database": "db2",
                        "group_function": "fn",
                        "field": "field",
                    }
                ],
            },
            _set_query_mock_v1,
        ),
        # V2: every supported connection and flux query option populated.
        (
            API_VERSION_2,
            {
                "api_version": "2",
                "ssl": "true",
                "host": "host",
                "port": "9000",
                "path": "path",
                "token": "token",
                "organization": "org",
                "bucket": "bucket",
            },
            {
                "queries_flux": [
                    {
                        "name": "test",
                        "unique_id": "unique_test_id",
                        "unit_of_measurement": "unit",
                        "range_start": "start",
                        "range_stop": "end",
                        "group_function": "fn",
                        "bucket": "bucket2",
                        "imports": "import",
                        "query": "query",
                    }
                ],
            },
            _set_query_mock_v2,
        ),
    ],
    indirect=["mock_client"],
)
async def test_full_config(hass, mock_client, config_ext, queries, set_query_mock):
    """Test that a fully-populated config sets up the sensor for both API versions."""
    set_query_mock(mock_client)
    await _setup(hass, config_ext, queries, ["sensor.test"])
@pytest.mark.parametrize("config_ext", [(BASE_V1_CONFIG), (BASE_V2_CONFIG)])
async def test_config_failure(hass, config_ext):
    """Test that a platform config without any queries fails schema validation."""
    config = {"platform": DOMAIN, **config_ext}
    with pytest.raises(Invalid):
        PLATFORM_SCHEMA(config)
@pytest.mark.parametrize(
    "mock_client, config_ext, queries, set_query_mock, make_resultset",
    [
        (
            DEFAULT_API_VERSION,
            BASE_V1_CONFIG,
            BASE_V1_QUERY,
            _set_query_mock_v1,
            _make_v1_resultset,
        ),
        (
            API_VERSION_2,
            BASE_V2_CONFIG,
            BASE_V2_QUERY,
            _set_query_mock_v2,
            _make_v2_resultset,
        ),
    ],
    indirect=["mock_client"],
)
async def test_state_matches_query_result(
    hass, mock_client, config_ext, queries, set_query_mock, make_resultset
):
    """Test that the sensor state matches the single value the query returns."""
    set_query_mock(mock_client, return_value=make_resultset(42))
    (state,) = await _setup(hass, config_ext, queries, ["sensor.test"])
    # Entity states are always strings in Home Assistant.
    assert state.state == "42"
@pytest.mark.parametrize(
    "mock_client, config_ext, queries, set_query_mock, make_resultset",
    [
        (
            DEFAULT_API_VERSION,
            BASE_V1_CONFIG,
            BASE_V1_QUERY,
            _set_query_mock_v1,
            _make_v1_resultset,
        ),
        (
            API_VERSION_2,
            BASE_V2_CONFIG,
            BASE_V2_QUERY,
            _set_query_mock_v2,
            _make_v2_resultset,
        ),
    ],
    indirect=["mock_client"],
)
async def test_state_matches_first_query_result_for_multiple_return(
    hass, caplog, mock_client, config_ext, queries, set_query_mock, make_resultset
):
    """Test that only the first value is used and extra results log one warning."""
    set_query_mock(mock_client, return_value=make_resultset(42, "not used"))
    (state,) = await _setup(hass, config_ext, queries, ["sensor.test"])
    assert state.state == "42"
    warnings = [rec for rec in caplog.records if rec.levelname == "WARNING"]
    assert len(warnings) == 1
@pytest.mark.parametrize(
    "mock_client, config_ext, queries, set_query_mock",
    [
        (
            DEFAULT_API_VERSION,
            BASE_V1_CONFIG,
            BASE_V1_QUERY,
            _set_query_mock_v1,
        ),
        (API_VERSION_2, BASE_V2_CONFIG, BASE_V2_QUERY, _set_query_mock_v2),
    ],
    indirect=["mock_client"],
)
async def test_state_for_no_results(
    hass, caplog, mock_client, config_ext, queries, set_query_mock
):
    """Test that an empty resultset yields an unknown state and one warning."""
    set_query_mock(mock_client)
    (state,) = await _setup(hass, config_ext, queries, ["sensor.test"])
    assert state.state == STATE_UNKNOWN
    warnings = [rec for rec in caplog.records if rec.levelname == "WARNING"]
    assert len(warnings) == 1
@pytest.mark.parametrize(
    "mock_client, config_ext, queries, set_query_mock, query_exception",
    [
        (
            DEFAULT_API_VERSION,
            BASE_V1_CONFIG,
            BASE_V1_QUERY,
            _set_query_mock_v1,
            OSError("fail"),
        ),
        (
            DEFAULT_API_VERSION,
            BASE_V1_CONFIG,
            BASE_V1_QUERY,
            _set_query_mock_v1,
            InfluxDBClientError("fail"),
        ),
        (
            DEFAULT_API_VERSION,
            BASE_V1_CONFIG,
            BASE_V1_QUERY,
            _set_query_mock_v1,
            InfluxDBClientError("fail", code=400),
        ),
        (
            API_VERSION_2,
            BASE_V2_CONFIG,
            BASE_V2_QUERY,
            _set_query_mock_v2,
            OSError("fail"),
        ),
        (
            API_VERSION_2,
            BASE_V2_CONFIG,
            BASE_V2_QUERY,
            _set_query_mock_v2,
            ApiException(http_resp=MagicMock()),
        ),
        (
            API_VERSION_2,
            BASE_V2_CONFIG,
            BASE_V2_QUERY,
            _set_query_mock_v2,
            ApiException(status=HTTPStatus.BAD_REQUEST, http_resp=MagicMock()),
        ),
    ],
    indirect=["mock_client"],
)
async def test_error_querying_influx(
    hass, caplog, mock_client, config_ext, queries, set_query_mock, query_exception
):
    """Test that a query failure yields an unknown state and exactly one error log."""
    set_query_mock(mock_client, query_exception=query_exception)
    (state,) = await _setup(hass, config_ext, queries, ["sensor.test"])
    assert state.state == STATE_UNKNOWN
    errors = [rec for rec in caplog.records if rec.levelname == "ERROR"]
    assert len(errors) == 1
@pytest.mark.parametrize(
    "mock_client, config_ext, queries, set_query_mock, make_resultset, key",
    [
        (
            DEFAULT_API_VERSION,
            BASE_V1_CONFIG,
            {
                "queries": [
                    {
                        "name": "test",
                        "unique_id": "unique_test_id",
                        "measurement": "measurement",
                        "where": "{{ illegal.template }}",
                        "field": "field",
                    }
                ]
            },
            _set_query_mock_v1,
            _make_v1_resultset,
            "where",
        ),
        (
            API_VERSION_2,
            BASE_V2_CONFIG,
            {
                "queries_flux": [
                    {
                        "name": "test",
                        "unique_id": "unique_test_id",
                        "query": "{{ illegal.template }}",
                    }
                ]
            },
            _set_query_mock_v2,
            _make_v2_resultset,
            "query",
        ),
    ],
    indirect=["mock_client"],
)
async def test_error_rendering_template(
    hass, caplog, mock_client, config_ext, queries, set_query_mock, make_resultset, key
):
    """Test that a broken template yields an unknown state and one render-error log."""
    set_query_mock(mock_client, return_value=make_resultset(42))
    (state,) = await _setup(hass, config_ext, queries, ["sensor.test"])
    assert state.state == STATE_UNKNOWN
    template_errors = [
        rec
        for rec in caplog.records
        if rec.levelname == "ERROR" and f"Could not render {key} template" in rec.msg
    ]
    assert len(template_errors) == 1
@pytest.mark.parametrize(
    "mock_client, config_ext, queries, set_query_mock, test_exception, make_resultset",
    [
        (
            DEFAULT_API_VERSION,
            BASE_V1_CONFIG,
            BASE_V1_QUERY,
            _set_query_mock_v1,
            OSError("fail"),
            _make_v1_resultset,
        ),
        (
            DEFAULT_API_VERSION,
            BASE_V1_CONFIG,
            BASE_V1_QUERY,
            _set_query_mock_v1,
            InfluxDBClientError("fail"),
            _make_v1_resultset,
        ),
        (
            DEFAULT_API_VERSION,
            BASE_V1_CONFIG,
            BASE_V1_QUERY,
            _set_query_mock_v1,
            InfluxDBServerError("fail"),
            _make_v1_resultset,
        ),
        (
            API_VERSION_2,
            BASE_V2_CONFIG,
            BASE_V2_QUERY,
            _set_query_mock_v2,
            OSError("fail"),
            _make_v2_resultset,
        ),
        (
            API_VERSION_2,
            BASE_V2_CONFIG,
            BASE_V2_QUERY,
            _set_query_mock_v2,
            ApiException(http_resp=MagicMock()),
            _make_v2_resultset,
        ),
    ],
    indirect=["mock_client"],
)
async def test_connection_error_at_startup(
    hass,
    caplog,
    mock_client,
    config_ext,
    queries,
    set_query_mock,
    test_exception,
    make_resultset,
):
    """Test that a startup connection error defers setup and setup retries later.

    First pass: the client raises, so the platform logs one error and the
    sensor entity is not created. Then the mock stops raising and time is
    advanced past the platform-not-ready wait, after which setup succeeds.
    """
    query_api = set_query_mock(mock_client, side_effect=test_exception)
    expected_sensor = "sensor.test"
    # Test sensor is not setup first time due to connection error
    await _setup(hass, config_ext, queries, [])
    assert hass.states.get(expected_sensor) is None
    assert (
        len([record for record in caplog.records if record.levelname == "ERROR"]) == 1
    )
    # Stop throwing exception and advance time to test setup succeeds
    query_api.reset_mock(side_effect=True)
    set_query_mock(mock_client, return_value=make_resultset(42))
    new_time = dt_util.utcnow() + timedelta(seconds=PLATFORM_NOT_READY_BASE_WAIT_TIME)
    async_fire_time_changed(hass, new_time)
    await hass.async_block_till_done()
    assert hass.states.get(expected_sensor) is not None
@pytest.mark.parametrize(
    "mock_client, config_ext, queries, set_query_mock",
    [
        (
            DEFAULT_API_VERSION,
            {"database": "bad_db"},
            BASE_V1_QUERY,
            _set_query_mock_v1,
        ),
        (
            API_VERSION_2,
            {
                "api_version": API_VERSION_2,
                "organization": "org",
                "token": "token",
                "bucket": "bad_bucket",
            },
            BASE_V2_QUERY,
            _set_query_mock_v2,
        ),
    ],
    indirect=["mock_client"],
)
async def test_data_repository_not_found(
    hass,
    caplog,
    mock_client,
    config_ext,
    queries,
    set_query_mock,
):
    """Test that setup is refused with one error when the database/bucket is missing."""
    set_query_mock(mock_client)
    await _setup(hass, config_ext, queries, [])
    assert hass.states.get("sensor.test") is None
    errors = [rec for rec in caplog.records if rec.levelname == "ERROR"]
    assert len(errors) == 1