Bump SQLAlchemy to 2.0.21 (#99745)
commit 6f8734167f (parent 8b5129a7d9)
.github/workflows/wheels.yml (vendored): 8 changes
@@ -56,7 +56,7 @@ jobs:
             echo "CI_BUILD=1"
             echo "ENABLE_HEADLESS=1"
 
-            # Use C-Extension for sqlalchemy
+            # Use C-Extension for SQLAlchemy
             echo "REQUIRE_SQLALCHEMY_CEXT=1"
           ) > .env_file
 
@@ -186,7 +186,7 @@ jobs:
           wheels-key: ${{ secrets.WHEELS_KEY }}
           env-file: true
           apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
-          skip-binary: aiohttp;grpcio;sqlalchemy;protobuf
+          skip-binary: aiohttp;grpcio;SQLAlchemy;protobuf
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
           requirements: "requirements_all.txtaa"
@@ -200,7 +200,7 @@ jobs:
           wheels-key: ${{ secrets.WHEELS_KEY }}
           env-file: true
           apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
-          skip-binary: aiohttp;grpcio;sqlalchemy;protobuf
+          skip-binary: aiohttp;grpcio;SQLAlchemy;protobuf
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
           requirements: "requirements_all.txtab"
@@ -214,7 +214,7 @@ jobs:
           wheels-key: ${{ secrets.WHEELS_KEY }}
           env-file: true
           apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
-          skip-binary: aiohttp;grpcio;sqlalchemy;protobuf
+          skip-binary: aiohttp;grpcio;SQLAlchemy;protobuf
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
           requirements: "requirements_all.txtac"
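Note: pip treats the two spellings as the same project, since PEP 503 normalizes
distribution names case-insensitively; switching to the canonical "SQLAlchemy"
spelling presumably matters because the wheel builder compares skip-binary
entries literally (an assumption, not stated in the commit). A minimal sketch of
PEP 503 normalization in Python:

    import re

    def normalize(name: str) -> str:
        # PEP 503: lowercase, and collapse runs of "-", "_", "." into one "-"
        return re.sub(r"[-_.]+", "-", name).lower()

    assert normalize("SQLAlchemy") == normalize("sqlalchemy") == "sqlalchemy"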
@@ -50,7 +50,7 @@ _BASE_STATES = (
     States.last_changed_ts,
     States.last_updated_ts,
 )
-_BASE_STATES_NO_LAST_CHANGED = (  # type: ignore[var-annotated]
+_BASE_STATES_NO_LAST_CHANGED = (
     States.entity_id,
     States.state,
     literal(value=None).label("last_changed_ts"),
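Note: dropping the "# type: ignore[var-annotated]" suggests the newer SQLAlchemy
ships typing improvements that let mypy infer a type for the tuple containing
literal(value=None).label(...); that reading is an inference, not stated in the
commit. A small sketch of the construct in question:

    from sqlalchemy import literal, select

    # literal(None) becomes a bound NULL parameter; label() names the column.
    stmt = select(literal(value=None).label("last_changed_ts"))
    print(stmt)  # SELECT :param_1 AS last_changed_ts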
@@ -7,7 +7,7 @@
   "iot_class": "local_push",
   "quality_scale": "internal",
   "requirements": [
-    "SQLAlchemy==2.0.15",
+    "SQLAlchemy==2.0.21",
     "fnv-hash-fast==0.4.1",
     "psutil-home-assistant==0.0.1"
   ]
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/sql",
   "iot_class": "local_polling",
-  "requirements": ["SQLAlchemy==2.0.15"]
+  "requirements": ["SQLAlchemy==2.0.21"]
 }
@@ -46,7 +46,7 @@ pyudev==0.23.2
 PyYAML==6.0.1
 requests==2.31.0
 scapy==2.5.0
-SQLAlchemy==2.0.15
+SQLAlchemy==2.0.21
 typing-extensions>=4.8.0,<5.0
 ulid-transform==0.8.1
 voluptuous-serialize==2.6.0
@@ -129,7 +129,7 @@ RtmAPI==0.7.2
 
 # homeassistant.components.recorder
 # homeassistant.components.sql
-SQLAlchemy==2.0.15
+SQLAlchemy==2.0.21
 
 # homeassistant.components.travisci
 TravisPy==0.3.5
@@ -116,7 +116,7 @@ RtmAPI==0.7.2
 
 # homeassistant.components.recorder
 # homeassistant.components.sql
-SQLAlchemy==2.0.15
+SQLAlchemy==2.0.21
 
 # homeassistant.components.onvif
 WSDiscovery==2.0.0
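Note: the same pin is bumped in lockstep across both integration manifests,
package_constraints.txt, requirements_all.txt, and requirements_test_all.txt;
in this repository the requirements files are generated from the manifests, so
the lists stay consistent. A quick runtime sanity check (a sketch, not part of
the commit):

    import sqlalchemy

    assert sqlalchemy.__version__ == "2.0.21", sqlalchemy.__version__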
@@ -39,6 +39,12 @@ SCHEMA_MODULE = "tests.components.recorder.db_schema_32"
 ORIG_TZ = dt_util.DEFAULT_TIME_ZONE
 
 
+async def _async_wait_migration_done(hass: HomeAssistant) -> None:
+    """Wait for the migration to be done."""
+    await recorder.get_instance(hass).async_block_till_done()
+    await async_recorder_block_till_done(hass)
+
+
 def _create_engine_test(*args, **kwargs):
     """Test version of create_engine that initializes with old schema.
 
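Note: the new helper waits in two stages because migration work runs on the
recorder's own thread: the first await drains the recorder task queue, the
second lets any follow-up work scheduled back onto the event loop settle. A
hedged usage sketch, mirroring the tests below:

    instance.queue_task(EventsContextIDMigrationTask())  # enqueue on the recorder thread
    await _async_wait_migration_done(hass)               # queue drained, tasks settled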
@@ -101,6 +107,8 @@ async def test_migrate_events_context_ids(
     """Test we can migrate old uuid context ids and ulid context ids to binary format."""
     instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
+    importlib.import_module(SCHEMA_MODULE)
+    old_db_schema = sys.modules[SCHEMA_MODULE]
 
     test_uuid = uuid.uuid4()
     uuid_hex = test_uuid.hex
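Note: these tests resolve the frozen version-32 schema module once and build
fixture rows from its ORM classes (old_db_schema.Events / old_db_schema.States)
instead of the live models, so the inserted rows match the old tables the
migration is expected to upgrade. The pattern in isolation:

    import importlib
    import sys

    SCHEMA_MODULE = "tests.components.recorder.db_schema_32"
    importlib.import_module(SCHEMA_MODULE)
    old_db_schema = sys.modules[SCHEMA_MODULE]

    # An old-schema row, not an instance of the current Events model.
    event = old_db_schema.Events(
        event_type="example_event",
        origin_idx=0,
        time_fired_ts=1.0,
    )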
@@ -110,7 +118,7 @@ async def test_migrate_events_context_ids(
     with session_scope(hass=hass) as session:
         session.add_all(
             (
-                Events(
+                old_db_schema.Events(
                     event_type="old_uuid_context_id_event",
                     event_data=None,
                     origin_idx=0,
@@ -123,7 +131,7 @@ async def test_migrate_events_context_ids(
                     context_parent_id=None,
                     context_parent_id_bin=None,
                 ),
-                Events(
+                old_db_schema.Events(
                     event_type="empty_context_id_event",
                     event_data=None,
                     origin_idx=0,
@@ -136,7 +144,7 @@ async def test_migrate_events_context_ids(
                     context_parent_id=None,
                     context_parent_id_bin=None,
                 ),
-                Events(
+                old_db_schema.Events(
                     event_type="ulid_context_id_event",
                     event_data=None,
                     origin_idx=0,
@@ -149,7 +157,7 @@ async def test_migrate_events_context_ids(
                     context_parent_id="01ARZ3NDEKTSV4RRFFQ69G5FA2",
                     context_parent_id_bin=None,
                 ),
-                Events(
+                old_db_schema.Events(
                     event_type="invalid_context_id_event",
                     event_data=None,
                     origin_idx=0,
@@ -162,7 +170,7 @@ async def test_migrate_events_context_ids(
                     context_parent_id=None,
                     context_parent_id_bin=None,
                 ),
-                Events(
+                old_db_schema.Events(
                     event_type="garbage_context_id_event",
                     event_data=None,
                     origin_idx=0,
@@ -175,7 +183,7 @@ async def test_migrate_events_context_ids(
                     context_parent_id=None,
                     context_parent_id_bin=None,
                 ),
-                Events(
+                old_db_schema.Events(
                     event_type="event_with_garbage_context_id_no_time_fired_ts",
                     event_data=None,
                     origin_idx=0,
@@ -196,10 +204,12 @@ async def test_migrate_events_context_ids(
     await async_wait_recording_done(hass)
     now = dt_util.utcnow()
     expected_ulid_fallback_start = ulid_to_bytes(ulid_at_time(now.timestamp()))[0:6]
+    await _async_wait_migration_done(hass)
+
     with freeze_time(now):
         # This is a threadsafe way to add a task to the recorder
         instance.queue_task(EventsContextIDMigrationTask())
-        await async_recorder_block_till_done(hass)
+        await _async_wait_migration_done(hass)
 
     def _object_as_dict(obj):
         return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs}
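Note: queue_task is described as threadsafe because the recorder consumes its
work from a queue on a dedicated thread; callers only enqueue. A generic
stdlib-only sketch of that hand-off pattern (not the recorder's actual
implementation):

    import queue
    import threading

    tasks: queue.Queue = queue.Queue()

    def recorder_loop() -> None:
        # Run tasks on this thread until a None sentinel arrives.
        while (task := tasks.get()) is not None:
            task()

    worker = threading.Thread(target=recorder_loop, daemon=True)
    worker.start()
    tasks.put(lambda: print("runs on the recorder thread"))
    tasks.put(None)  # sentinel: shut down
    worker.join()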
@@ -304,6 +314,8 @@ async def test_migrate_states_context_ids(
     """Test we can migrate old uuid context ids and ulid context ids to binary format."""
     instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
+    importlib.import_module(SCHEMA_MODULE)
+    old_db_schema = sys.modules[SCHEMA_MODULE]
 
     test_uuid = uuid.uuid4()
     uuid_hex = test_uuid.hex
@@ -313,7 +325,7 @@ async def test_migrate_states_context_ids(
     with session_scope(hass=hass) as session:
         session.add_all(
             (
-                States(
+                old_db_schema.States(
                     entity_id="state.old_uuid_context_id",
                     last_updated_ts=1477721632.452529,
                     context_id=uuid_hex,
@@ -323,7 +335,7 @@ async def test_migrate_states_context_ids(
                     context_parent_id=None,
                     context_parent_id_bin=None,
                 ),
-                States(
+                old_db_schema.States(
                     entity_id="state.empty_context_id",
                     last_updated_ts=1477721632.552529,
                     context_id=None,
@@ -333,7 +345,7 @@ async def test_migrate_states_context_ids(
                     context_parent_id=None,
                     context_parent_id_bin=None,
                 ),
-                States(
+                old_db_schema.States(
                     entity_id="state.ulid_context_id",
                     last_updated_ts=1477721632.552529,
                     context_id="01ARZ3NDEKTSV4RRFFQ69G5FAV",
@@ -343,7 +355,7 @@ async def test_migrate_states_context_ids(
                     context_parent_id="01ARZ3NDEKTSV4RRFFQ69G5FA2",
                     context_parent_id_bin=None,
                 ),
-                States(
+                old_db_schema.States(
                     entity_id="state.invalid_context_id",
                     last_updated_ts=1477721632.552529,
                     context_id="invalid",
@@ -353,7 +365,7 @@ async def test_migrate_states_context_ids(
                     context_parent_id=None,
                     context_parent_id_bin=None,
                 ),
-                States(
+                old_db_schema.States(
                     entity_id="state.garbage_context_id",
                     last_updated_ts=1477721632.552529,
                     context_id="adapt_lgt:b'5Cf*':interval:b'0R'",
@@ -363,7 +375,7 @@ async def test_migrate_states_context_ids(
                     context_parent_id=None,
                     context_parent_id_bin=None,
                 ),
-                States(
+                old_db_schema.States(
                     entity_id="state.human_readable_uuid_context_id",
                     last_updated_ts=1477721632.552529,
                     context_id="0ae29799-ee4e-4f45-8116-f582d7d3ee65",
@@ -380,7 +392,7 @@ async def test_migrate_states_context_ids(
 
     await async_wait_recording_done(hass)
     instance.queue_task(StatesContextIDMigrationTask())
-    await async_recorder_block_till_done(hass)
+    await _async_wait_migration_done(hass)
 
     def _object_as_dict(obj):
         return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs}
@@ -489,22 +501,24 @@ async def test_migrate_event_type_ids(
     """Test we can migrate event_types to the EventTypes table."""
     instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
+    importlib.import_module(SCHEMA_MODULE)
+    old_db_schema = sys.modules[SCHEMA_MODULE]
 
     def _insert_events():
         with session_scope(hass=hass) as session:
             session.add_all(
                 (
-                    Events(
+                    old_db_schema.Events(
                         event_type="event_type_one",
                         origin_idx=0,
                         time_fired_ts=1677721632.452529,
                     ),
-                    Events(
+                    old_db_schema.Events(
                         event_type="event_type_one",
                         origin_idx=0,
                         time_fired_ts=1677721632.552529,
                     ),
-                    Events(
+                    old_db_schema.Events(
                         event_type="event_type_two",
                         origin_idx=0,
                         time_fired_ts=1677721632.552529,
@@ -517,7 +531,7 @@ async def test_migrate_event_type_ids(
     await async_wait_recording_done(hass)
     # This is a threadsafe way to add a task to the recorder
     instance.queue_task(EventTypeIDMigrationTask())
-    await async_recorder_block_till_done(hass)
+    await _async_wait_migration_done(hass)
 
     def _fetch_migrated_events():
         with session_scope(hass=hass, read_only=True) as session:
@@ -570,22 +584,24 @@ async def test_migrate_entity_ids(
     """Test we can migrate entity_ids to the StatesMeta table."""
    instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
+    importlib.import_module(SCHEMA_MODULE)
+    old_db_schema = sys.modules[SCHEMA_MODULE]
 
     def _insert_states():
         with session_scope(hass=hass) as session:
             session.add_all(
                 (
-                    States(
+                    old_db_schema.States(
                         entity_id="sensor.one",
                         state="one_1",
                         last_updated_ts=1.452529,
                     ),
-                    States(
+                    old_db_schema.States(
                         entity_id="sensor.two",
                         state="two_2",
                         last_updated_ts=2.252529,
                     ),
-                    States(
+                    old_db_schema.States(
                         entity_id="sensor.two",
                         state="two_1",
                         last_updated_ts=3.152529,
@@ -595,10 +611,10 @@ async def test_migrate_entity_ids(
 
     await instance.async_add_executor_job(_insert_states)
 
-    await async_wait_recording_done(hass)
+    await _async_wait_migration_done(hass)
     # This is a threadsafe way to add a task to the recorder
     instance.queue_task(EntityIDMigrationTask())
-    await async_recorder_block_till_done(hass)
+    await _async_wait_migration_done(hass)
 
     def _fetch_migrated_states():
         with session_scope(hass=hass, read_only=True) as session:
@@ -636,22 +652,24 @@ async def test_post_migrate_entity_ids(
     """Test we can migrate entity_ids to the StatesMeta table."""
     instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
+    importlib.import_module(SCHEMA_MODULE)
+    old_db_schema = sys.modules[SCHEMA_MODULE]
 
     def _insert_events():
         with session_scope(hass=hass) as session:
             session.add_all(
                 (
-                    States(
+                    old_db_schema.States(
                         entity_id="sensor.one",
                         state="one_1",
                         last_updated_ts=1.452529,
                     ),
-                    States(
+                    old_db_schema.States(
                         entity_id="sensor.two",
                         state="two_2",
                         last_updated_ts=2.252529,
                     ),
-                    States(
+                    old_db_schema.States(
                         entity_id="sensor.two",
                         state="two_1",
                         last_updated_ts=3.152529,
@@ -661,10 +679,10 @@ async def test_post_migrate_entity_ids(
 
     await instance.async_add_executor_job(_insert_events)
 
-    await async_wait_recording_done(hass)
+    await _async_wait_migration_done(hass)
     # This is a threadsafe way to add a task to the recorder
     instance.queue_task(EntityIDPostMigrationTask())
-    await async_recorder_block_till_done(hass)
+    await _async_wait_migration_done(hass)
 
     def _fetch_migrated_states():
         with session_scope(hass=hass, read_only=True) as session:
@@ -688,18 +706,20 @@ async def test_migrate_null_entity_ids(
     """Test we can migrate entity_ids to the StatesMeta table."""
     instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
+    importlib.import_module(SCHEMA_MODULE)
+    old_db_schema = sys.modules[SCHEMA_MODULE]
 
     def _insert_states():
         with session_scope(hass=hass) as session:
             session.add(
-                States(
+                old_db_schema.States(
                     entity_id="sensor.one",
                     state="one_1",
                     last_updated_ts=1.452529,
                 ),
             )
             session.add_all(
-                States(
+                old_db_schema.States(
                     entity_id=None,
                     state="empty",
                     last_updated_ts=time + 1.452529,
@@ -707,7 +727,7 @@ async def test_migrate_null_entity_ids(
                 for time in range(1000)
             )
             session.add(
-                States(
+                old_db_schema.States(
                     entity_id="sensor.one",
                     state="one_1",
                     last_updated_ts=2.452529,
@@ -716,11 +736,10 @@ async def test_migrate_null_entity_ids(
 
     await instance.async_add_executor_job(_insert_states)
 
-    await async_wait_recording_done(hass)
+    await _async_wait_migration_done(hass)
     # This is a threadsafe way to add a task to the recorder
     instance.queue_task(EntityIDMigrationTask())
-    await async_recorder_block_till_done(hass)
-    await async_recorder_block_till_done(hass)
+    await _async_wait_migration_done(hass)
 
     def _fetch_migrated_states():
         with session_scope(hass=hass, read_only=True) as session:
@@ -758,18 +777,20 @@ async def test_migrate_null_event_type_ids(
     """Test we can migrate event_types to the EventTypes table when the event_type is NULL."""
     instance = await async_setup_recorder_instance(hass)
     await async_wait_recording_done(hass)
+    importlib.import_module(SCHEMA_MODULE)
+    old_db_schema = sys.modules[SCHEMA_MODULE]
 
     def _insert_events():
         with session_scope(hass=hass) as session:
             session.add(
-                Events(
+                old_db_schema.Events(
                     event_type="event_type_one",
                     origin_idx=0,
                     time_fired_ts=1.452529,
                 ),
             )
             session.add_all(
-                Events(
+                old_db_schema.Events(
                     event_type=None,
                     origin_idx=0,
                     time_fired_ts=time + 1.452529,
@@ -777,7 +798,7 @@ async def test_migrate_null_event_type_ids(
                 for time in range(1000)
             )
             session.add(
-                Events(
+                old_db_schema.Events(
                     event_type="event_type_one",
                     origin_idx=0,
                     time_fired_ts=2.452529,
@@ -786,12 +807,10 @@ async def test_migrate_null_event_type_ids(
 
     await instance.async_add_executor_job(_insert_events)
 
-    await async_wait_recording_done(hass)
+    await _async_wait_migration_done(hass)
     # This is a threadsafe way to add a task to the recorder
-
     instance.queue_task(EventTypeIDMigrationTask())
-    await async_recorder_block_till_done(hass)
-    await async_recorder_block_till_done(hass)
+    await _async_wait_migration_done(hass)
 
     def _fetch_migrated_events():
         with session_scope(hass=hass, read_only=True) as session: