
[tests] Add integration tests for oversized payload handling in API (#10788)

J. Nick Koston
2025-09-21 15:07:47 -06:00
committed by GitHub
parent c64d385fa6
commit 440b0b5574
10 changed files with 461 additions and 11 deletions


@@ -58,6 +58,8 @@ def _get_platformio_env(cache_dir: Path) -> dict[str, str]:
     env["PLATFORMIO_CORE_DIR"] = str(cache_dir)
     env["PLATFORMIO_CACHE_DIR"] = str(cache_dir / ".cache")
     env["PLATFORMIO_LIBDEPS_DIR"] = str(cache_dir / "libdeps")
+    # Prevent cache cleaning during integration tests
+    env["ESPHOME_SKIP_CLEAN_BUILD"] = "1"
     return env
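
The new ESPHOME_SKIP_CLEAN_BUILD entry only helps if the build side consults it before wiping artifacts. As a rough, hypothetical sketch (an assumption about intent, not ESPHome's actual implementation), the flag would be honored roughly like this:

import os
import shutil

def clean_build_if_allowed(build_dir: str) -> None:
    # Hypothetical helper: skip cleaning when the integration tests set the flag,
    # so the shared PlatformIO cache survives between parallel test builds.
    if os.environ.get("ESPHOME_SKIP_CLEAN_BUILD") == "1":
        return
    shutil.rmtree(build_dir, ignore_errors=True)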
@@ -68,6 +70,11 @@ def shared_platformio_cache() -> Generator[Path]:
     test_cache_dir = Path.home() / ".esphome-integration-tests"
     cache_dir = test_cache_dir / "platformio"
 
+    # Create the temp directory that PlatformIO uses to avoid race conditions
+    # This ensures it exists and won't be deleted by parallel processes
+    platformio_tmp_dir = cache_dir / ".cache" / "tmp"
+    platformio_tmp_dir.mkdir(parents=True, exist_ok=True)
+
     # Use a lock file in the home directory to ensure only one process initializes the cache
     # This is needed when running with pytest-xdist
     # The lock file must be in a directory that already exists to avoid race conditions
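
The comments above describe a classic single-initializer pattern for pytest-xdist: one worker takes an exclusive lock, does the expensive setup, and the remaining workers block until it is ready, then reuse the result. A minimal sketch of that pattern (POSIX-only, using fcntl; initialize_cache_once is an illustrative name, not the code in this commit):

import fcntl
from pathlib import Path

def initialize_cache_once(lock_path: Path, cache_dir: Path) -> None:
    # The lock file lives in a directory that already exists (the home directory),
    # so two workers can never race on creating its parent.
    with open(lock_path, "w") as lock_file:
        fcntl.flock(lock_file, fcntl.LOCK_EX)  # blocks until the lock is free
        try:
            if not (cache_dir / ".cache").exists():
                # Placeholder for the expensive one-time work (in the real fixture,
                # a full ESPHome compile that warms the PlatformIO cache).
                (cache_dir / ".cache").mkdir(parents=True, exist_ok=True)
        finally:
            fcntl.flock(lock_file, fcntl.LOCK_UN)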
@@ -83,17 +90,11 @@ def shared_platformio_cache() -> Generator[Path]:
     test_cache_dir.mkdir(exist_ok=True)
 
     with tempfile.TemporaryDirectory() as tmpdir:
-        # Create a basic host config
+        # Use the cache_init fixture for initialization
         init_dir = Path(tmpdir)
+        fixture_path = Path(__file__).parent / "fixtures" / "cache_init.yaml"
         config_path = init_dir / "cache_init.yaml"
-        config_path.write_text("""esphome:
-  name: cache-init
-host:
-api:
-  encryption:
-    key: "IIevImVI42I0FGos5nLqFK91jrJehrgidI0ArwMLr8w="
-logger:
-""")
+        config_path.write_text(fixture_path.read_text())
 
         # Run compilation to populate the cache
         # We must succeed here to avoid race conditions where multiple
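
For context, the "run compilation to populate the cache" step that follows is essentially a one-off esphome compile of the copied config, executed with the shared PlatformIO environment. A hedged sketch (prime_platformio_cache is an illustrative name, and it assumes the _get_platformio_env helper from the first hunk is available in the same module):

import os
import subprocess
from pathlib import Path

def prime_platformio_cache(config_path: Path, cache_dir: Path) -> None:
    # Merge the shared-cache environment into the current one so PlatformIO
    # writes its core, cache, and libdeps directories into cache_dir.
    env = {**os.environ, **_get_platformio_env(cache_dir)}
    # check=True: the priming build must succeed, otherwise parallel workers
    # could race against a half-populated cache.
    subprocess.run(["esphome", "compile", str(config_path)], env=env, check=True)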
@@ -346,7 +347,8 @@ async def wait_and_connect_api_client(
     noise_psk: str | None = None,
     client_info: str = "integration-test",
     timeout: float = API_CONNECTION_TIMEOUT,
-) -> AsyncGenerator[APIClient]:
+    return_disconnect_event: bool = False,
+) -> AsyncGenerator[APIClient | tuple[APIClient, asyncio.Event]]:
     """Wait for API to be available and connect."""
     client = APIClient(
         address=address,
@@ -359,14 +361,17 @@ async def wait_and_connect_api_client(
     # Create a future to signal when connected
     loop = asyncio.get_running_loop()
     connected_future: asyncio.Future[None] = loop.create_future()
+    disconnect_event = asyncio.Event()
 
     async def on_connect() -> None:
         """Called when successfully connected."""
+        disconnect_event.clear()  # Clear the disconnect event on new connection
         if not connected_future.done():
            connected_future.set_result(None)
 
     async def on_disconnect(expected_disconnect: bool) -> None:
         """Called when disconnected."""
+        disconnect_event.set()
         if not connected_future.done() and not expected_disconnect:
             connected_future.set_exception(
                 APIConnectionError("Disconnected before fully connected")
@@ -397,7 +402,10 @@ async def wait_and_connect_api_client(
         except TimeoutError:
             raise TimeoutError(f"Failed to connect to API after {timeout} seconds")
 
-        yield client
+        if return_disconnect_event:
+            yield client, disconnect_event
+        else:
+            yield client
     finally:
         # Stop reconnect logic and disconnect
         await reconnect_logic.stop()
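
With return_disconnect_event=True the helper now yields a (client, disconnect_event) pair instead of a bare client. A hedged usage sketch (it assumes wait_and_connect_api_client is used as an async context manager, as the yield/finally structure above suggests):

import asyncio

async def example_wait_for_device_disconnect(port: int) -> None:
    async with wait_and_connect_api_client(
        port=port, return_disconnect_event=True
    ) as (client, disconnect_event):
        assert await client.device_info() is not None
        # ... trigger the condition under test (e.g. an oversized frame) ...
        # The device closing the connection fires on_disconnect, which sets the event.
        await asyncio.wait_for(disconnect_event.wait(), timeout=5.0)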
@@ -430,6 +438,33 @@ async def api_client_connected(
     yield _connect_client
 
 
+@pytest_asyncio.fixture
+async def api_client_connected_with_disconnect(
+    unused_tcp_port: int,
+) -> AsyncGenerator:
+    """Factory for creating connected API client context managers with disconnect event."""
+
+    def _connect_client_with_disconnect(
+        address: str = LOCALHOST,
+        port: int | None = None,
+        password: str = "",
+        noise_psk: str | None = None,
+        client_info: str = "integration-test",
+        timeout: float = API_CONNECTION_TIMEOUT,
+    ):
+        return wait_and_connect_api_client(
+            address=address,
+            port=port if port is not None else unused_tcp_port,
+            password=password,
+            noise_psk=noise_psk,
+            client_info=client_info,
+            timeout=timeout,
+            return_disconnect_event=True,
+        )
+
+    yield _connect_client_with_disconnect
+
+
 async def _read_stream_lines(
     stream: asyncio.StreamReader,
     lines: list[str],
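
Finally, a hedged sketch of how the new fixture might be exercised by one of the oversized-payload integration tests this commit adds (the test name, the OVERSIZED_PAYLOAD_YAML constant, and the run_compiled fixture usage are illustrative, not copied from the commit):

import asyncio

async def test_oversized_payload_closes_connection(
    run_compiled,                          # assumed pre-existing integration-test fixture
    api_client_connected_with_disconnect,  # fixture added above
) -> None:
    async with run_compiled(OVERSIZED_PAYLOAD_YAML):  # hypothetical config constant
        async with api_client_connected_with_disconnect() as (client, disconnect_event):
            assert await client.device_info() is not None
            # ... send a frame larger than the device will accept (details elided) ...
            # The device is expected to drop the connection rather than buffer it.
            await asyncio.wait_for(disconnect_event.wait(), timeout=5.0)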