Implement shared PlatformIO cache for integration tests (#9413)

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
author J. Nick Koston, 2025-07-09 14:42:24 -10:00, committed by GitHub
parent e9c7596e00
commit 97dd96b60d

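In short: instead of each test spinning up its own isolated .platformio directory, a session-scoped shared_platformio_cache fixture populates one cache under ~/.esphome-integration-tests/platformio (serialized by a file lock so it also works under pytest-xdist), and compile_esphome now points PlatformIO at that cache via environment variables. A rough consumer sketch of where the change lands for test authors; the test name, YAML, and use of tmp_path are illustrative only and not part of this commit (the real suite likely writes configs through its own write_yaml_config helper):

import pytest


@pytest.mark.asyncio  # may be unnecessary if the suite runs asyncio in auto mode
async def test_binary_builds(compile_esphome, tmp_path) -> None:
    # compile_esphome now depends on shared_platformio_cache, so this compile
    # reuses the session-wide PlatformIO packages instead of downloading them again
    config = tmp_path / "demo.yaml"
    config.write_text("esphome:\n  name: demo\nhost:\napi:\nlogger:\n")
    binary = await compile_esphome(config)  # CompileFunction: config path -> binary path
    assert binary.exists()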

@@ -5,12 +5,14 @@ from __future__ import annotations
 import asyncio
 from collections.abc import AsyncGenerator, Callable, Generator
 from contextlib import AbstractAsyncContextManager, asynccontextmanager
+import fcntl
 import logging
 import os
 from pathlib import Path
 import platform
 import signal
 import socket
+import subprocess
 import sys
 import tempfile
 from typing import TextIO
@@ -50,6 +52,66 @@ if platform.system() == "Windows":
 import pty  # not available on Windows
 
 
+def _get_platformio_env(cache_dir: Path) -> dict[str, str]:
+    """Get environment variables for PlatformIO with shared cache."""
+    env = os.environ.copy()
+    env["PLATFORMIO_CORE_DIR"] = str(cache_dir)
+    env["PLATFORMIO_CACHE_DIR"] = str(cache_dir / ".cache")
+    env["PLATFORMIO_LIBDEPS_DIR"] = str(cache_dir / "libdeps")
+    return env
+
+
+@pytest.fixture(scope="session")
+def shared_platformio_cache() -> Generator[Path]:
+    """Initialize a shared PlatformIO cache for all integration tests."""
+    # Use a dedicated directory for integration tests to avoid conflicts
+    test_cache_dir = Path.home() / ".esphome-integration-tests"
+    cache_dir = test_cache_dir / "platformio"
+
+    # Use a lock file in the home directory to ensure only one process initializes the cache
+    # This is needed when running with pytest-xdist
+    # The lock file must be in a directory that already exists to avoid race conditions
+    lock_file = Path.home() / ".esphome-integration-tests-init.lock"
+
+    # Always acquire the lock to ensure cache is ready before proceeding
+    with open(lock_file, "w") as lock_fd:
+        fcntl.flock(lock_fd.fileno(), fcntl.LOCK_EX)
+
+        # Check if cache needs initialization while holding the lock
+        if not cache_dir.exists() or not any(cache_dir.iterdir()):
+            # Create the test cache directory if it doesn't exist
+            test_cache_dir.mkdir(exist_ok=True)
+
+            with tempfile.TemporaryDirectory() as tmpdir:
+                # Create a basic host config
+                init_dir = Path(tmpdir)
+                config_path = init_dir / "cache_init.yaml"
+                config_path.write_text("""esphome:
+  name: cache-init
+host:
+api:
+  encryption:
+    key: "IIevImVI42I0FGos5nLqFK91jrJehrgidI0ArwMLr8w="
+logger:
+""")
+
+                # Run compilation to populate the cache
+                # We must succeed here to avoid race conditions where multiple
+                # tests try to populate the same cache directory simultaneously
+                env = _get_platformio_env(cache_dir)
+                subprocess.run(
+                    ["esphome", "compile", str(config_path)],
+                    check=True,
+                    cwd=init_dir,
+                    env=env,
+                )
+
+    # Lock is held until here, ensuring cache is fully populated before any test proceeds
+    yield cache_dir
+
+
 @pytest.fixture(scope="module", autouse=True)
 def enable_aioesphomeapi_debug_logging():
     """Enable debug logging for aioesphomeapi to help diagnose connection issues."""
@@ -161,22 +223,14 @@ async def write_yaml_config(
 @pytest_asyncio.fixture
 async def compile_esphome(
     integration_test_dir: Path,
+    shared_platformio_cache: Path,
 ) -> AsyncGenerator[CompileFunction]:
     """Compile an ESPHome configuration and return the binary path."""
 
     async def _compile(config_path: Path) -> Path:
-        # Create a unique PlatformIO directory for this test to avoid race conditions
-        platformio_dir = integration_test_dir / ".platformio"
-        platformio_dir.mkdir(parents=True, exist_ok=True)
-
-        # Create cache directory as well
-        platformio_cache_dir = platformio_dir / ".cache"
-        platformio_cache_dir.mkdir(parents=True, exist_ok=True)
-
-        # Set up environment with isolated PlatformIO directories
-        env = os.environ.copy()
-        env["PLATFORMIO_CORE_DIR"] = str(platformio_dir)
-        env["PLATFORMIO_CACHE_DIR"] = str(platformio_cache_dir)
+        # Use the shared PlatformIO cache for faster compilation
+        # This avoids re-downloading dependencies for each test
+        env = _get_platformio_env(shared_platformio_cache)
 
         # Retry compilation up to 3 times if we get a segfault
         max_retries = 3
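The retry loop itself is cut off by this hunk; for context, a minimal sketch of the pattern the comment describes, assuming the compile is launched with asyncio.create_subprocess_exec (the real fixture's command line, logging, and error handling may differ):

for attempt in range(1, max_retries + 1):
    proc = await asyncio.create_subprocess_exec(
        "esphome", "compile", str(config_path),
        cwd=integration_test_dir,
        env=env,
    )
    await proc.wait()
    if proc.returncode == 0:
        break  # compiled successfully
    # A negative return code is the terminating signal; retry only on a segfault
    if proc.returncode == -signal.SIGSEGV and attempt < max_retries:
        continue
    raise RuntimeError(f"esphome compile failed with return code {proc.returncode}")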