From 10ca86ae8dc1daa7e74794135f047e614343eee6 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Wed, 15 Oct 2025 16:41:25 -1000
Subject: [PATCH 01/31] [api] Use std::unique_ptr for fixed-size byte buffers in Noise protocol (#11278)

---
 .../components/api/api_frame_helper_noise.cpp | 23 +++++++++----------
 1 file changed, 11 insertions(+), 12 deletions(-)

diff --git a/esphome/components/api/api_frame_helper_noise.cpp b/esphome/components/api/api_frame_helper_noise.cpp
index 1213e65948..e952ea670b 100644
--- a/esphome/components/api/api_frame_helper_noise.cpp
+++ b/esphome/components/api/api_frame_helper_noise.cpp
@@ -242,7 +242,6 @@ APIError APINoiseFrameHelper::state_action_() {
     const std::string &name = App.get_name();
     const std::string &mac = get_mac_address();
 
-    std::vector<uint8_t> msg;
     // Calculate positions and sizes
     size_t name_len = name.size() + 1;  // including null terminator
     size_t mac_len = mac.size() + 1;    // including null terminator
@@ -250,17 +249,17 @@
     size_t mac_offset = name_offset + name_len;
     size_t total_size = 1 + name_len + mac_len;
 
-    msg.resize(total_size);
+    auto msg = std::make_unique<uint8_t[]>(total_size);
 
     // chosen proto
     msg[0] = 0x01;
 
     // node name, terminated by null byte
-    std::memcpy(msg.data() + name_offset, name.c_str(), name_len);
+    std::memcpy(msg.get() + name_offset, name.c_str(), name_len);
     // node mac, terminated by null byte
-    std::memcpy(msg.data() + mac_offset, mac.c_str(), mac_len);
+    std::memcpy(msg.get() + mac_offset, mac.c_str(), mac_len);
 
-    aerr = write_frame_(msg.data(), msg.size());
+    aerr = write_frame_(msg.get(), total_size);
     if (aerr != APIError::OK)
       return aerr;
 
@@ -339,32 +338,32 @@ void APINoiseFrameHelper::send_explicit_handshake_reject_(const LogString *reaso
 #ifdef USE_STORE_LOG_STR_IN_FLASH
   // On ESP8266 with flash strings, we need to use PROGMEM-aware functions
   size_t reason_len = strlen_P(reinterpret_cast<PGM_P>(reason));
-  std::vector<uint8_t> data;
-  data.resize(reason_len + 1);
+  size_t data_size = reason_len + 1;
+  auto data = std::make_unique<uint8_t[]>(data_size);
   data[0] = 0x01;  // failure
 
   // Copy error message from PROGMEM
   if (reason_len > 0) {
-    memcpy_P(data.data() + 1, reinterpret_cast<PGM_P>(reason), reason_len);
+    memcpy_P(data.get() + 1, reinterpret_cast<PGM_P>(reason), reason_len);
   }
 #else
   // Normal memory access
   const char *reason_str = LOG_STR_ARG(reason);
   size_t reason_len = strlen(reason_str);
-  std::vector<uint8_t> data;
-  data.resize(reason_len + 1);
+  size_t data_size = reason_len + 1;
+  auto data = std::make_unique<uint8_t[]>(data_size);
   data[0] = 0x01;  // failure
 
   // Copy error message in bulk
   if (reason_len > 0) {
-    std::memcpy(data.data() + 1, reason_str, reason_len);
+    std::memcpy(data.get() + 1, reason_str, reason_len);
   }
 #endif
 
   // temporarily remove failed state
   auto orig_state = state_;
   state_ = State::EXPLICIT_REJECT;
-  write_frame_(data.data(), data.size());
+  write_frame_(data.get(), data_size);
   state_ = orig_state;
 }
 
 APIError APINoiseFrameHelper::read_packet(ReadPacketBuffer *buffer) {
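The buffer change above swaps std::vector<uint8_t> for std::unique_ptr<uint8_t[]>: once the total size is computed, the buffer never grows, so the vector's size/capacity bookkeeping and growth path are dead weight on a small embedded target. A standalone sketch of the same layout logic follows; the function and variable names here are illustrative, not taken from the ESPHome sources:

    #include <cstdint>
    #include <cstring>
    #include <memory>
    #include <string>

    // Packs [proto byte][name + '\0'][mac + '\0'] into a fixed-size heap buffer.
    // std::make_unique<uint8_t[]> allocates exactly total_size bytes and the
    // smart pointer frees them automatically; the caller keeps the length,
    // since unique_ptr<T[]> stores no size of its own.
    std::unique_ptr<uint8_t[]> build_hello_msg(const std::string &name, const std::string &mac,
                                               size_t &total_size) {
      size_t name_len = name.size() + 1;  // including null terminator
      size_t mac_len = mac.size() + 1;    // including null terminator
      total_size = 1 + name_len + mac_len;
      auto msg = std::make_unique<uint8_t[]>(total_size);
      msg[0] = 0x01;  // chosen proto
      std::memcpy(msg.get() + 1, name.c_str(), name_len);
      std::memcpy(msg.get() + 1 + name_len, mac.c_str(), mac_len);
      return msg;
    }

Carrying the length separately (total_size, data_size) is the one cost of this change; the patch threads it through to write_frame_() by hand.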
Nick Koston" Date: Wed, 15 Oct 2025 16:48:38 -1000 Subject: [PATCH 02/31] [api] Use FixedVector for HomeAssistantServiceCallAction to reduce flash usage and avoid realloc (#11277) --- esphome/components/api/__init__.py | 17 +++++++++ .../components/api/homeassistant_service.h | 36 ++++++++++++++----- 2 files changed, 44 insertions(+), 9 deletions(-) diff --git a/esphome/components/api/__init__.py b/esphome/components/api/__init__.py index 58828c131d..e8dacf51bc 100644 --- a/esphome/components/api/__init__.py +++ b/esphome/components/api/__init__.py @@ -380,12 +380,19 @@ async def homeassistant_service_to_code( var = cg.new_Pvariable(action_id, template_arg, serv, False) templ = await cg.templatable(config[CONF_ACTION], args, None) cg.add(var.set_service(templ)) + + # Initialize FixedVectors with exact sizes from config + cg.add(var.init_data(len(config[CONF_DATA]))) for key, value in config[CONF_DATA].items(): templ = await cg.templatable(value, args, None) cg.add(var.add_data(key, templ)) + + cg.add(var.init_data_template(len(config[CONF_DATA_TEMPLATE]))) for key, value in config[CONF_DATA_TEMPLATE].items(): templ = await cg.templatable(value, args, None) cg.add(var.add_data_template(key, templ)) + + cg.add(var.init_variables(len(config[CONF_VARIABLES]))) for key, value in config[CONF_VARIABLES].items(): templ = await cg.templatable(value, args, None) cg.add(var.add_variable(key, templ)) @@ -458,15 +465,23 @@ async def homeassistant_event_to_code(config, action_id, template_arg, args): var = cg.new_Pvariable(action_id, template_arg, serv, True) templ = await cg.templatable(config[CONF_EVENT], args, None) cg.add(var.set_service(templ)) + + # Initialize FixedVectors with exact sizes from config + cg.add(var.init_data(len(config[CONF_DATA]))) for key, value in config[CONF_DATA].items(): templ = await cg.templatable(value, args, None) cg.add(var.add_data(key, templ)) + + cg.add(var.init_data_template(len(config[CONF_DATA_TEMPLATE]))) for key, value in config[CONF_DATA_TEMPLATE].items(): templ = await cg.templatable(value, args, None) cg.add(var.add_data_template(key, templ)) + + cg.add(var.init_variables(len(config[CONF_VARIABLES]))) for key, value in config[CONF_VARIABLES].items(): templ = await cg.templatable(value, args, None) cg.add(var.add_variable(key, templ)) + return var @@ -489,6 +504,8 @@ async def homeassistant_tag_scanned_to_code(config, action_id, template_arg, arg serv = await cg.get_variable(config[CONF_ID]) var = cg.new_Pvariable(action_id, template_arg, serv, True) cg.add(var.set_service("esphome.tag_scanned")) + # Initialize FixedVector with exact size (1 data field) + cg.add(var.init_data(1)) templ = await cg.templatable(config[CONF_TAG], args, cg.std_string) cg.add(var.add_data("tag_id", templ)) return var diff --git a/esphome/components/api/homeassistant_service.h b/esphome/components/api/homeassistant_service.h index 46e89cb39f..4343fcd0bb 100644 --- a/esphome/components/api/homeassistant_service.h +++ b/esphome/components/api/homeassistant_service.h @@ -41,10 +41,14 @@ template class TemplatableStringValue : public TemplatableValue class TemplatableKeyValuePair { public: + // Default constructor needed for FixedVector::emplace_back() + TemplatableKeyValuePair() = default; + // Keys are always string literals from YAML dictionary keys (e.g., "code", "event") // and never templatable values or lambdas. Only the value parameter can be a lambda/template. // Using pass-by-value with std::move allows optimal performance for both lvalues and rvalues. 
   template<typename T> TemplatableKeyValuePair(std::string key, T value) : key(std::move(key)), value(value) {}
+
   std::string key;
   TemplatableStringValue<X...> value;
 };
@@ -93,15 +97,22 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<T
   template<typename T> void set_service(T service) { this->service_ = service; }
 
+  // Initialize FixedVector members - called from Python codegen with compile-time known sizes.
+  // Must be called before any add_* methods; capacity must match the number of subsequent add_* calls.
+  void init_data(size_t count) { this->data_.init(count); }
+  void init_data_template(size_t count) { this->data_template_.init(count); }
+  void init_variables(size_t count) { this->variables_.init(count); }
+
   // Keys are always string literals from the Python code generation (e.g., cg.add(var.add_data("tag_id", templ))).
   // The value parameter can be a lambda/template, but keys are never templatable.
-  // Using pass-by-value allows the compiler to optimize for both lvalues and rvalues.
-  template<typename T> void add_data(std::string key, T value) { this->data_.emplace_back(std::move(key), value); }
-  template<typename T> void add_data_template(std::string key, T value) {
-    this->data_template_.emplace_back(std::move(key), value);
+  template<typename K, typename V> void add_data(K &&key, V &&value) {
+    this->add_kv_(this->data_, std::forward<K>(key), std::forward<V>(value));
   }
-  template<typename T> void add_variable(std::string key, T value) {
-    this->variables_.emplace_back(std::move(key), value);
+  template<typename K, typename V> void add_data_template(K &&key, V &&value) {
+    this->add_kv_(this->data_template_, std::forward<K>(key), std::forward<V>(value));
+  }
+  template<typename K, typename V> void add_variable(K &&key, V &&value) {
+    this->add_kv_(this->variables_, std::forward<K>(key), std::forward<V>(value));
   }
 
 #ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
@@ -174,6 +185,13 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<T
+  template<typename K, typename V>
+  void add_kv_(FixedVector<TemplatableKeyValuePair<Ts...>> &vec, K &&key, V &&value) {
+    auto &kv = vec.emplace_back();
+    kv.key = std::forward<K>(key);
+    kv.value = std::forward<V>(value);
+  }
+
   template<typename VectorType, typename SourceType>
   static void populate_service_map(VectorType &dest, SourceType &source, Ts... x) {
     dest.init(source.size());
@@ -186,9 +204,9 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<T
   TemplatableStringValue<Ts...> service_{};
-  std::vector<TemplatableKeyValuePair<Ts...>> data_;
-  std::vector<TemplatableKeyValuePair<Ts...>> data_template_;
-  std::vector<TemplatableKeyValuePair<Ts...>> variables_;
+  FixedVector<TemplatableKeyValuePair<Ts...>> data_;
+  FixedVector<TemplatableKeyValuePair<Ts...>> data_template_;
+  FixedVector<TemplatableKeyValuePair<Ts...>> variables_;
 #ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
 #ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
   TemplatableStringValue<Ts...> response_template_{""};
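Patch 02's codegen side and C++ side form a two-phase contract: Python emits exactly one init_*(n) call with the count taken from the YAML config, then exactly n add_*() calls, so the container can allocate its backing store once and never reallocate. A rough stand-in for the FixedVector behavior this assumes (the real class is an ESPHome internal; this simplified sketch only mirrors the init()/emplace_back() contract visible in the diff):

    #include <cstddef>

    // Fixed-capacity vector: one allocation in init(), no growth path.
    // Elements are default-constructed up front, which is why the patch adds
    // a default constructor to TemplatableKeyValuePair.
    template<typename T> class FixedVectorSketch {
     public:
      ~FixedVectorSketch() { delete[] data_; }
      void init(size_t capacity) {
        delete[] data_;
        data_ = new T[capacity];
        capacity_ = capacity;
        size_ = 0;
      }
      // Returns a reference to the next default-constructed slot; the caller
      // fills its fields, as add_kv_() does with key and value.
      T &emplace_back() { return data_[size_++]; }
      size_t size() const { return size_; }
      T *begin() { return data_; }
      T *end() { return data_ + size_; }

     private:
      T *data_{nullptr};
      size_t capacity_{0};
      size_t size_{0};
    };

Because the capacity comes from len(config[...]) at code-generation time, the count is exact by construction; an init() that is too small would be a codegen bug, not a runtime condition to handle.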
Nick Koston" Date: Wed, 15 Oct 2025 16:58:58 -1000 Subject: [PATCH 03/31] [git] Automatically recover from broken git repositories in external_components (#11246) --- esphome/git.py | 150 +++++++++-- tests/unit_tests/conftest.py | 7 + tests/unit_tests/test_git.py | 499 ++++++++++++++++++++++++++++++++--- 3 files changed, 594 insertions(+), 62 deletions(-) diff --git a/esphome/git.py b/esphome/git.py index 62fe37a3fe..4ff07ffe75 100644 --- a/esphome/git.py +++ b/esphome/git.py @@ -5,6 +5,7 @@ import hashlib import logging from pathlib import Path import re +import shutil import subprocess import urllib.parse @@ -17,14 +18,60 @@ _LOGGER = logging.getLogger(__name__) NEVER_REFRESH = TimePeriodSeconds(seconds=-1) -def run_git_command(cmd, cwd=None) -> str: - _LOGGER.debug("Running git command: %s", " ".join(cmd)) +class GitException(cv.Invalid): + """Base exception for git-related errors.""" + + +class GitNotInstalledError(GitException): + """Exception raised when git is not installed on the system.""" + + +class GitCommandError(GitException): + """Exception raised when a git command fails.""" + + +class GitRepositoryError(GitException): + """Exception raised when a git repository is in an invalid state.""" + + +def run_git_command(cmd: list[str], git_dir: Path | None = None) -> str: + if git_dir is not None: + _LOGGER.debug( + "Running git command with repository isolation: %s (git_dir=%s)", + " ".join(cmd), + git_dir, + ) + else: + _LOGGER.debug("Running git command: %s", " ".join(cmd)) + + # Set up environment for repository isolation if git_dir is provided + # Force git to only operate on this specific repository by setting + # GIT_DIR and GIT_WORK_TREE. This prevents git from walking up the + # directory tree to find parent repositories when the target repo's + # .git directory is corrupt. Without this, commands like 'git stash' + # could accidentally operate on parent repositories (e.g., the main + # ESPHome repo) instead of failing, causing data loss. 
+ env: dict[str, str] | None = None + cwd: str | None = None + if git_dir is not None: + env = { + **subprocess.os.environ, + "GIT_DIR": str(Path(git_dir) / ".git"), + "GIT_WORK_TREE": str(git_dir), + } + cwd = str(git_dir) + try: ret = subprocess.run( - cmd, cwd=cwd, capture_output=True, check=False, close_fds=False + cmd, + cwd=cwd, + capture_output=True, + check=False, + close_fds=False, + env=env, ) except FileNotFoundError as err: - raise cv.Invalid( + raise GitNotInstalledError( "git is not installed but required for external_components.\n" "Please see https://git-scm.com/book/en/v2/Getting-Started-Installing-Git for installing git" ) from err @@ -33,8 +80,8 @@ def run_git_command(cmd, cwd=None) -> str: err_str = ret.stderr.decode("utf-8") lines = [x.strip() for x in err_str.splitlines()] if lines[-1].startswith("fatal:"): - raise cv.Invalid(lines[-1][len("fatal: ") :]) - raise cv.Invalid(err_str) + raise GitCommandError(lines[-1][len("fatal: ") :]) + raise GitCommandError(err_str) return ret.stdout.decode("utf-8").strip() @@ -55,6 +102,7 @@ def clone_or_update( username: str = None, password: str = None, submodules: list[str] | None = None, + _recover_broken: bool = True, ) -> tuple[Path, Callable[[], None] | None]: key = f"{url}@{ref}" @@ -75,15 +123,15 @@ def clone_or_update( # We need to fetch the PR branch first, otherwise git will complain # about missing objects _LOGGER.info("Fetching %s", ref) - run_git_command(["git", "fetch", "--", "origin", ref], str(repo_dir)) - run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], str(repo_dir)) + run_git_command(["git", "fetch", "--", "origin", ref], git_dir=repo_dir) + run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], git_dir=repo_dir) if submodules is not None: _LOGGER.info( - "Initialising submodules (%s) for %s", ", ".join(submodules), key + "Initializing submodules (%s) for %s", ", ".join(submodules), key ) run_git_command( - ["git", "submodule", "update", "--init"] + submodules, str(repo_dir) + ["git", "submodule", "update", "--init"] + submodules, git_dir=repo_dir ) else: @@ -99,32 +147,82 @@ def clone_or_update( file_timestamp = Path(repo_dir / ".git" / "HEAD") age = datetime.now() - datetime.fromtimestamp(file_timestamp.stat().st_mtime) if refresh is None or age.total_seconds() > refresh.total_seconds: - old_sha = run_git_command(["git", "rev-parse", "HEAD"], str(repo_dir)) - _LOGGER.info("Updating %s", key) - _LOGGER.debug("Location: %s", repo_dir) - # Stash local changes (if any) - run_git_command( - ["git", "stash", "push", "--include-untracked"], str(repo_dir) - ) - # Fetch remote ref - cmd = ["git", "fetch", "--", "origin"] - if ref is not None: - cmd.append(ref) - run_git_command(cmd, str(repo_dir)) - # Hard reset to FETCH_HEAD (short-lived git ref corresponding to most recent fetch) - run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], str(repo_dir)) + # Try to update the repository, recovering from broken state if needed + old_sha: str | None = None + try: + # First verify the repository is valid by checking HEAD + # Use git_dir parameter to prevent git from walking up to parent repos + old_sha = run_git_command( + ["git", "rev-parse", "HEAD"], git_dir=repo_dir + ) + + _LOGGER.info("Updating %s", key) + _LOGGER.debug("Location: %s", repo_dir) + + # Stash local changes (if any) + # Use git_dir to ensure this only affects the specific repo + run_git_command( + ["git", "stash", "push", "--include-untracked"], + git_dir=repo_dir, + ) + + # Fetch remote ref + cmd = ["git", "fetch", "--", "origin"] + if ref 
is not None: + cmd.append(ref) + run_git_command(cmd, git_dir=repo_dir) + + # Hard reset to FETCH_HEAD (short-lived git ref corresponding to most recent fetch) + run_git_command( + ["git", "reset", "--hard", "FETCH_HEAD"], + git_dir=repo_dir, + ) + except GitException as err: + # Repository is in a broken state or update failed + # Only attempt recovery once to prevent infinite recursion + if not _recover_broken: + _LOGGER.error( + "Repository %s recovery failed, cannot retry (already attempted once)", + key, + ) + raise + + _LOGGER.warning( + "Repository %s has issues (%s), attempting recovery", + key, + err, + ) + _LOGGER.info("Removing broken repository at %s", repo_dir) + shutil.rmtree(repo_dir) + _LOGGER.info("Successfully removed broken repository, re-cloning...") + + # Recursively call clone_or_update to re-clone + # Set _recover_broken=False to prevent infinite recursion + result = clone_or_update( + url=url, + ref=ref, + refresh=refresh, + domain=domain, + username=username, + password=password, + submodules=submodules, + _recover_broken=False, + ) + _LOGGER.info("Repository %s successfully recovered", key) + return result if submodules is not None: _LOGGER.info( "Updating submodules (%s) for %s", ", ".join(submodules), key ) run_git_command( - ["git", "submodule", "update", "--init"] + submodules, str(repo_dir) + ["git", "submodule", "update", "--init"] + submodules, + git_dir=repo_dir, ) def revert(): _LOGGER.info("Reverting changes to %s -> %s", key, old_sha) - run_git_command(["git", "reset", "--hard", old_sha], str(repo_dir)) + run_git_command(["git", "reset", "--hard", old_sha], git_dir=repo_dir) return repo_dir, revert diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py index 932221997c..fc61841500 100644 --- a/tests/unit_tests/conftest.py +++ b/tests/unit_tests/conftest.py @@ -96,6 +96,13 @@ def mock_run_git_command() -> Generator[Mock, None, None]: yield mock +@pytest.fixture +def mock_subprocess_run() -> Generator[Mock, None, None]: + """Mock subprocess.run for testing.""" + with patch("subprocess.run") as mock: + yield mock + + @pytest.fixture def mock_get_idedata() -> Generator[Mock, None, None]: """Mock get_idedata for platformio_api.""" diff --git a/tests/unit_tests/test_git.py b/tests/unit_tests/test_git.py index 6a51206ec2..0411fe5e43 100644 --- a/tests/unit_tests/test_git.py +++ b/tests/unit_tests/test_git.py @@ -1,13 +1,204 @@ """Tests for git.py module.""" from datetime import datetime, timedelta -import hashlib import os from pathlib import Path +from typing import Any from unittest.mock import Mock +import pytest + from esphome import git from esphome.core import CORE, TimePeriodSeconds +from esphome.git import GitCommandError + + +def _compute_repo_dir(url: str, ref: str | None, domain: str) -> Path: + """Helper to compute the expected repo directory path using git module's logic.""" + key = f"{url}@{ref}" + return git._compute_destination_path(key, domain) + + +def _setup_old_repo(repo_dir: Path, days_old: int = 2) -> None: + """Helper to set up a git repo directory structure with an old timestamp. + + Args: + repo_dir: The repository directory path to create. + days_old: Number of days old to make the FETCH_HEAD file (default: 2). 
+ """ + # Create repo directory + repo_dir.mkdir(parents=True) + git_dir = repo_dir / ".git" + git_dir.mkdir() + + # Create FETCH_HEAD file with old timestamp + fetch_head = git_dir / "FETCH_HEAD" + fetch_head.write_text("test") + old_time = datetime.now() - timedelta(days=days_old) + fetch_head.touch() + os.utime(fetch_head, (old_time.timestamp(), old_time.timestamp())) + + +def _get_git_command_type(cmd: list[str]) -> str | None: + """Helper to determine the type of git command from a command list. + + Args: + cmd: The git command list (e.g., ["git", "rev-parse", "HEAD"]). + + Returns: + The command type ("rev-parse", "stash", "fetch", "reset", "clone") or None. + """ + # Git commands are always in format ["git", "command", ...], so check index 1 + if len(cmd) > 1: + return cmd[1] + return None + + +def test_run_git_command_success(tmp_path: Path) -> None: + """Test that run_git_command returns output on success.""" + # Create a simple git repo to test with + repo_dir = tmp_path / "test_repo" + repo_dir.mkdir() + + # Initialize a git repo + result = git.run_git_command(["git", "init"], str(repo_dir)) + assert "Initialized empty Git repository" in result or result == "" + + # Verify we can run a command and get output + result = git.run_git_command(["git", "status", "--porcelain"], str(repo_dir)) + # Empty repo should have empty status + assert isinstance(result, str) + + +def test_run_git_command_with_git_dir_isolation( + tmp_path: Path, mock_subprocess_run: Mock +) -> None: + """Test that git_dir parameter properly isolates git operations.""" + repo_dir = tmp_path / "test_repo" + repo_dir.mkdir() + git_dir = repo_dir / ".git" + git_dir.mkdir() + + # Configure mock to return success + mock_subprocess_run.return_value = Mock( + returncode=0, + stdout=b"test output", + stderr=b"", + ) + + result = git.run_git_command( + ["git", "rev-parse", "HEAD"], + git_dir=repo_dir, + ) + + # Verify subprocess.run was called + assert mock_subprocess_run.called + call_args = mock_subprocess_run.call_args + + # Verify environment was set + env = call_args[1]["env"] + assert "GIT_DIR" in env + assert "GIT_WORK_TREE" in env + assert env["GIT_DIR"] == str(repo_dir / ".git") + assert env["GIT_WORK_TREE"] == str(repo_dir) + + assert result == "test output" + + +def test_run_git_command_raises_git_not_installed_error( + tmp_path: Path, mock_subprocess_run: Mock +) -> None: + """Test that FileNotFoundError is converted to GitNotInstalledError.""" + from esphome.git import GitNotInstalledError + + repo_dir = tmp_path / "test_repo" + + # Configure mock to raise FileNotFoundError + mock_subprocess_run.side_effect = FileNotFoundError("git not found") + + with pytest.raises(GitNotInstalledError, match="git is not installed"): + git.run_git_command(["git", "status"], git_dir=repo_dir) + + +def test_run_git_command_raises_git_command_error_on_failure( + tmp_path: Path, mock_subprocess_run: Mock +) -> None: + """Test that failed git commands raise GitCommandError.""" + repo_dir = tmp_path / "test_repo" + + # Configure mock to return non-zero exit code + mock_subprocess_run.return_value = Mock( + returncode=1, + stdout=b"", + stderr=b"fatal: not a git repository", + ) + + with pytest.raises(GitCommandError, match="not a git repository"): + git.run_git_command(["git", "status"], git_dir=repo_dir) + + +def test_run_git_command_strips_fatal_prefix( + tmp_path: Path, mock_subprocess_run: Mock +) -> None: + """Test that 'fatal: ' prefix is stripped from error messages.""" + repo_dir = tmp_path / "test_repo" + + # Configure 
mock to return error with "fatal: " prefix + mock_subprocess_run.return_value = Mock( + returncode=128, + stdout=b"", + stderr=b"fatal: repository not found\n", + ) + + with pytest.raises(GitCommandError) as exc_info: + git.run_git_command(["git", "clone", "invalid-url"], git_dir=repo_dir) + + # Error message should NOT include "fatal: " prefix + assert "fatal:" not in str(exc_info.value) + assert "repository not found" in str(exc_info.value) + + +def test_run_git_command_without_git_dir(mock_subprocess_run: Mock) -> None: + """Test that run_git_command works without git_dir (clone case).""" + # Configure mock to return success + mock_subprocess_run.return_value = Mock( + returncode=0, + stdout=b"Cloning into 'test_repo'...", + stderr=b"", + ) + + result = git.run_git_command(["git", "clone", "https://github.com/test/repo"]) + + # Verify subprocess.run was called + assert mock_subprocess_run.called + call_args = mock_subprocess_run.call_args + + # Verify environment does NOT have GIT_DIR or GIT_WORK_TREE set + # (it should use the default environment or None) + env = call_args[1].get("env") + if env is not None: + assert "GIT_DIR" not in env + assert "GIT_WORK_TREE" not in env + + # Verify cwd is None (default) + assert call_args[1].get("cwd") is None + + assert result == "Cloning into 'test_repo'..." + + +def test_run_git_command_without_git_dir_raises_error( + mock_subprocess_run: Mock, +) -> None: + """Test that run_git_command without git_dir can still raise errors.""" + # Configure mock to return error + mock_subprocess_run.return_value = Mock( + returncode=128, + stdout=b"", + stderr=b"fatal: repository not found\n", + ) + + with pytest.raises(GitCommandError, match="repository not found"): + git.run_git_command(["git", "clone", "https://invalid.url/repo.git"]) def test_clone_or_update_with_never_refresh( @@ -17,16 +208,10 @@ def test_clone_or_update_with_never_refresh( # Set up CORE.config_path so data_dir uses tmp_path CORE.config_path = tmp_path / "test.yaml" - # Compute the expected repo directory path url = "https://github.com/test/repo" ref = None - key = f"{url}@{ref}" domain = "test" - - # Compute hash-based directory name (matching _compute_destination_path logic) - h = hashlib.new("sha256") - h.update(key.encode()) - repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8] + repo_dir = _compute_repo_dir(url, ref, domain) # Create the git repo directory structure repo_dir.mkdir(parents=True) @@ -58,16 +243,10 @@ def test_clone_or_update_with_refresh_updates_old_repo( # Set up CORE.config_path so data_dir uses tmp_path CORE.config_path = tmp_path / "test.yaml" - # Compute the expected repo directory path url = "https://github.com/test/repo" ref = None - key = f"{url}@{ref}" domain = "test" - - # Compute hash-based directory name (matching _compute_destination_path logic) - h = hashlib.new("sha256") - h.update(key.encode()) - repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8] + repo_dir = _compute_repo_dir(url, ref, domain) # Create the git repo directory structure repo_dir.mkdir(parents=True) @@ -112,16 +291,10 @@ def test_clone_or_update_with_refresh_skips_fresh_repo( # Set up CORE.config_path so data_dir uses tmp_path CORE.config_path = tmp_path / "test.yaml" - # Compute the expected repo directory path url = "https://github.com/test/repo" ref = None - key = f"{url}@{ref}" domain = "test" - - # Compute hash-based directory name (matching _compute_destination_path logic) - h = hashlib.new("sha256") - h.update(key.encode()) - repo_dir = tmp_path / ".esphome" / 
domain / h.hexdigest()[:8] + repo_dir = _compute_repo_dir(url, ref, domain) # Create the git repo directory structure repo_dir.mkdir(parents=True) @@ -158,16 +331,10 @@ def test_clone_or_update_clones_missing_repo( # Set up CORE.config_path so data_dir uses tmp_path CORE.config_path = tmp_path / "test.yaml" - # Compute the expected repo directory path url = "https://github.com/test/repo" ref = None - key = f"{url}@{ref}" domain = "test" - - # Compute hash-based directory name (matching _compute_destination_path logic) - h = hashlib.new("sha256") - h.update(key.encode()) - repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8] + repo_dir = _compute_repo_dir(url, ref, domain) # Create base directory but NOT the repo itself base_dir = tmp_path / ".esphome" / domain @@ -200,16 +367,10 @@ def test_clone_or_update_with_none_refresh_always_updates( # Set up CORE.config_path so data_dir uses tmp_path CORE.config_path = tmp_path / "test.yaml" - # Compute the expected repo directory path url = "https://github.com/test/repo" ref = None - key = f"{url}@{ref}" domain = "test" - - # Compute hash-based directory name (matching _compute_destination_path logic) - h = hashlib.new("sha256") - h.update(key.encode()) - repo_dir = tmp_path / ".esphome" / domain / h.hexdigest()[:8] + repo_dir = _compute_repo_dir(url, ref, domain) # Create the git repo directory structure repo_dir.mkdir(parents=True) @@ -244,3 +405,269 @@ def test_clone_or_update_with_none_refresh_always_updates( if len(call[0]) > 0 and "fetch" in call[0][0] ] assert len(fetch_calls) > 0 + + +@pytest.mark.parametrize( + ("fail_command", "error_message"), + [ + ( + "rev-parse", + "ambiguous argument 'HEAD': unknown revision or path not in the working tree.", + ), + ("stash", "fatal: unable to write new index file"), + ( + "fetch", + "fatal: unable to access 'https://github.com/test/repo/': Could not resolve host", + ), + ("reset", "fatal: Could not reset index file to revision 'FETCH_HEAD'"), + ], +) +def test_clone_or_update_recovers_from_git_failures( + tmp_path: Path, mock_run_git_command: Mock, fail_command: str, error_message: str +) -> None: + """Test that repos are re-cloned when various git commands fail.""" + # Set up CORE.config_path so data_dir uses tmp_path + CORE.config_path = tmp_path / "test.yaml" + + url = "https://github.com/test/repo" + ref = "main" + domain = "test" + repo_dir = _compute_repo_dir(url, ref, domain) + + # Use helper to set up old repo + _setup_old_repo(repo_dir) + + # Track command call counts to make first call fail, subsequent calls succeed + call_counts: dict[str, int] = {} + + def git_command_side_effect( + cmd: list[str], cwd: str | None = None, **kwargs: Any + ) -> str: + # Determine which command this is + cmd_type = _get_git_command_type(cmd) + + # Track call count for this command type + if cmd_type: + call_counts[cmd_type] = call_counts.get(cmd_type, 0) + 1 + + # Fail on first call to the specified command, succeed on subsequent calls + if cmd_type == fail_command and call_counts[cmd_type] == 1: + raise GitCommandError(error_message) + + # Default successful responses + if cmd_type == "rev-parse": + return "abc123" + return "" + + mock_run_git_command.side_effect = git_command_side_effect + + refresh = TimePeriodSeconds(days=1) + result_dir, revert = git.clone_or_update( + url=url, + ref=ref, + refresh=refresh, + domain=domain, + ) + + # Verify recovery happened + call_list = mock_run_git_command.call_args_list + + # Should have attempted the failing command + assert any(fail_command in str(c) for 
c in call_list) + + # Should have called clone for recovery + assert any("clone" in str(c) for c in call_list) + + # Verify the repo directory path is returned + assert result_dir == repo_dir + + +def test_clone_or_update_fails_when_recovery_also_fails( + tmp_path: Path, mock_run_git_command: Mock +) -> None: + """Test that we don't infinitely recurse when recovery also fails.""" + # Set up CORE.config_path so data_dir uses tmp_path + CORE.config_path = tmp_path / "test.yaml" + + url = "https://github.com/test/repo" + ref = "main" + domain = "test" + repo_dir = _compute_repo_dir(url, ref, domain) + + # Use helper to set up old repo + _setup_old_repo(repo_dir) + + # Mock git command to fail on clone (simulating network failure during recovery) + def git_command_side_effect( + cmd: list[str], cwd: str | None = None, **kwargs: Any + ) -> str: + cmd_type = _get_git_command_type(cmd) + if cmd_type == "rev-parse": + # First time fails (broken repo) + raise GitCommandError( + "ambiguous argument 'HEAD': unknown revision or path not in the working tree." + ) + if cmd_type == "clone": + # Clone also fails (recovery fails) + raise GitCommandError("fatal: unable to access repository") + return "" + + mock_run_git_command.side_effect = git_command_side_effect + + refresh = TimePeriodSeconds(days=1) + + # Should raise after one recovery attempt fails + with pytest.raises(GitCommandError, match="fatal: unable to access repository"): + git.clone_or_update( + url=url, + ref=ref, + refresh=refresh, + domain=domain, + ) + + # Verify we only tried to clone once (no infinite recursion) + call_list = mock_run_git_command.call_args_list + clone_calls = [c for c in call_list if "clone" in c[0][0]] + # Should have exactly one clone call (the recovery attempt that failed) + assert len(clone_calls) == 1 + # Should have tried rev-parse once (which failed and triggered recovery) + rev_parse_calls = [c for c in call_list if "rev-parse" in c[0][0]] + assert len(rev_parse_calls) == 1 + + +def test_clone_or_update_recover_broken_flag_prevents_second_recovery( + tmp_path: Path, mock_run_git_command: Mock +) -> None: + """Test that _recover_broken=False prevents a second recovery attempt (tests the raise path).""" + # Set up CORE.config_path so data_dir uses tmp_path + CORE.config_path = tmp_path / "test.yaml" + + url = "https://github.com/test/repo" + ref = "main" + domain = "test" + repo_dir = _compute_repo_dir(url, ref, domain) + + # Use helper to set up old repo + _setup_old_repo(repo_dir) + + # Track fetch calls to differentiate between first (in clone) and second (in recovery update) + call_counts: dict[str, int] = {} + + # Mock git command to fail on fetch during recovery's ref checkout + def git_command_side_effect( + cmd: list[str], cwd: str | None = None, **kwargs: Any + ) -> str: + cmd_type = _get_git_command_type(cmd) + + if cmd_type: + call_counts[cmd_type] = call_counts.get(cmd_type, 0) + 1 + + # First attempt: rev-parse fails (broken repo) + if cmd_type == "rev-parse" and call_counts[cmd_type] == 1: + raise GitCommandError( + "ambiguous argument 'HEAD': unknown revision or path not in the working tree." 
+ ) + + # Recovery: clone succeeds + if cmd_type == "clone": + return "" + + # Recovery: fetch for ref checkout fails + # This happens in the clone path when ref is not None (line 80 in git.py) + if cmd_type == "fetch" and call_counts[cmd_type] == 1: + raise GitCommandError("fatal: couldn't find remote ref main") + + # Default success + return "abc123" if cmd_type == "rev-parse" else "" + + mock_run_git_command.side_effect = git_command_side_effect + + refresh = TimePeriodSeconds(days=1) + + # Should raise on the fetch during recovery (when _recover_broken=False) + # This tests the critical "if not _recover_broken: raise" path + with pytest.raises(GitCommandError, match="fatal: couldn't find remote ref main"): + git.clone_or_update( + url=url, + ref=ref, + refresh=refresh, + domain=domain, + ) + + # Verify the sequence of events + call_list = mock_run_git_command.call_args_list + + # Should have: rev-parse (fail, triggers recovery), clone (success), + # fetch (fail during ref checkout, raises because _recover_broken=False) + rev_parse_calls = [c for c in call_list if "rev-parse" in c[0][0]] + # Should have exactly one rev-parse call that failed + assert len(rev_parse_calls) == 1 + + clone_calls = [c for c in call_list if "clone" in c[0][0]] + # Should have exactly one clone call (the recovery attempt) + assert len(clone_calls) == 1 + + fetch_calls = [c for c in call_list if "fetch" in c[0][0]] + # Should have exactly one fetch call that failed (during ref checkout in recovery) + assert len(fetch_calls) == 1 + + +def test_clone_or_update_recover_broken_flag_prevents_infinite_loop( + tmp_path: Path, mock_run_git_command: Mock +) -> None: + """Test that _recover_broken=False prevents infinite recursion when repo persists.""" + # This tests the critical "if not _recover_broken: raise" path at line 124-125 + # Set up CORE.config_path so data_dir uses tmp_path + CORE.config_path = tmp_path / "test.yaml" + + url = "https://github.com/test/repo" + ref = "main" + domain = "test" + repo_dir = _compute_repo_dir(url, ref, domain) + + # Use helper to set up old repo + _setup_old_repo(repo_dir) + + # Mock shutil.rmtree to NOT actually delete the directory + # This simulates a scenario where deletion fails (permissions, etc.) 
+ import unittest.mock + + def mock_rmtree(path, *args, **kwargs): + # Don't actually delete - this causes the recursive call to still see the repo + pass + + # Mock git commands to always fail on stash + def git_command_side_effect( + cmd: list[str], cwd: str | None = None, **kwargs: Any + ) -> str: + cmd_type = _get_git_command_type(cmd) + if cmd_type == "rev-parse": + return "abc123" + if cmd_type == "stash": + # Always fails + raise GitCommandError("fatal: unable to write new index file") + return "" + + mock_run_git_command.side_effect = git_command_side_effect + + refresh = TimePeriodSeconds(days=1) + + # Mock shutil.rmtree and test + # Should raise on the second attempt when _recover_broken=False + # This hits the "if not _recover_broken: raise" path + with ( + unittest.mock.patch("esphome.git.shutil.rmtree", side_effect=mock_rmtree), + pytest.raises(GitCommandError, match="fatal: unable to write new index file"), + ): + git.clone_or_update( + url=url, + ref=ref, + refresh=refresh, + domain=domain, + ) + + # Verify the sequence: stash fails twice (once triggering recovery, once raising) + call_list = mock_run_git_command.call_args_list + stash_calls = [c for c in call_list if "stash" in c[0][0]] + # Should have exactly two stash calls + assert len(stash_calls) == 2 From 236ca12d3e98c62fbb6694f6b20047ddb814554e Mon Sep 17 00:00:00 2001 From: Keith Burzinski Date: Wed, 15 Oct 2025 21:59:55 -0500 Subject: [PATCH 04/31] [api, climate, thermostat] Implement feature_flags for `climate` (#10987) Co-authored-by: J. Nick Koston --- esphome/components/api/api.proto | 11 ++- esphome/components/api/api_connection.cpp | 19 ++-- esphome/components/api/api_pb2.cpp | 2 + esphome/components/api/api_pb2.h | 3 +- esphome/components/api/api_pb2_dump.cpp | 1 + esphome/components/climate/climate.cpp | 48 +++++---- esphome/components/climate/climate_mode.h | 15 +++ esphome/components/climate/climate_traits.h | 99 +++++++++++++------ .../thermostat/thermostat_climate.cpp | 12 ++- 9 files changed, 142 insertions(+), 68 deletions(-) diff --git a/esphome/components/api/api.proto b/esphome/components/api/api.proto index c7d8fb28f0..30e3cfa056 100644 --- a/esphome/components/api/api.proto +++ b/esphome/components/api/api.proto @@ -987,8 +987,8 @@ message ListEntitiesClimateResponse { string name = 3; reserved 4; // Deprecated: was string unique_id - bool supports_current_temperature = 5; - bool supports_two_point_target_temperature = 6; + bool supports_current_temperature = 5; // Deprecated: use feature_flags + bool supports_two_point_target_temperature = 6; // Deprecated: use feature_flags repeated ClimateMode supported_modes = 7 [(container_pointer) = "std::set"]; float visual_min_temperature = 8; float visual_max_temperature = 9; @@ -997,7 +997,7 @@ message ListEntitiesClimateResponse { // is if CLIMATE_PRESET_AWAY exists is supported_presets // Deprecated in API version 1.5 bool legacy_supports_away = 11 [deprecated=true]; - bool supports_action = 12; + bool supports_action = 12; // Deprecated: use feature_flags repeated ClimateFanMode supported_fan_modes = 13 [(container_pointer) = "std::set"]; repeated ClimateSwingMode supported_swing_modes = 14 [(container_pointer) = "std::set"]; repeated string supported_custom_fan_modes = 15 [(container_pointer) = "std::set"]; @@ -1007,11 +1007,12 @@ message ListEntitiesClimateResponse { string icon = 19 [(field_ifdef) = "USE_ENTITY_ICON"]; EntityCategory entity_category = 20; float visual_current_temperature_step = 21; - bool supports_current_humidity = 22; - bool 
supports_target_humidity = 23;
+  bool supports_current_humidity = 22;  // Deprecated: use feature_flags
+  bool supports_target_humidity = 23;  // Deprecated: use feature_flags
   float visual_min_humidity = 24;
   float visual_max_humidity = 25;
   uint32 device_id = 26 [(field_ifdef) = "USE_DEVICES"];
+  uint32 feature_flags = 27;
 }
 
 message ClimateStateResponse {
   option (id) = 47;
diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp
index ae03dfbb33..1f3456a205 100644
--- a/esphome/components/api/api_connection.cpp
+++ b/esphome/components/api/api_connection.cpp
@@ -27,6 +27,9 @@
 #ifdef USE_BLUETOOTH_PROXY
 #include "esphome/components/bluetooth_proxy/bluetooth_proxy.h"
 #endif
+#ifdef USE_CLIMATE
+#include "esphome/components/climate/climate_mode.h"
+#endif
 #ifdef USE_VOICE_ASSISTANT
 #include "esphome/components/voice_assistant/voice_assistant.h"
 #endif
@@ -623,9 +626,10 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
   auto traits = climate->get_traits();
   resp.mode = static_cast<enums::ClimateMode>(climate->mode);
   resp.action = static_cast<enums::ClimateAction>(climate->action);
-  if (traits.get_supports_current_temperature())
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE))
     resp.current_temperature = climate->current_temperature;
-  if (traits.get_supports_two_point_target_temperature()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                               climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
     resp.target_temperature_low = climate->target_temperature_low;
     resp.target_temperature_high = climate->target_temperature_high;
   } else {
@@ -644,9 +648,9 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
   }
   if (traits.get_supports_swing_modes())
     resp.swing_mode = static_cast<enums::ClimateSwingMode>(climate->swing_mode);
-  if (traits.get_supports_current_humidity())
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY))
     resp.current_humidity = climate->current_humidity;
-  if (traits.get_supports_target_humidity())
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY))
     resp.target_humidity = climate->target_humidity;
   return fill_and_encode_entity_state(climate, resp, ClimateStateResponse::MESSAGE_TYPE, conn, remaining_size,
                                       is_single);
@@ -656,10 +660,14 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
   auto *climate = static_cast<climate::Climate *>(entity);
   ListEntitiesClimateResponse msg;
   auto traits = climate->get_traits();
+  // Flags set for backward compatibility, deprecated in 2025.11.0
   msg.supports_current_temperature = traits.get_supports_current_temperature();
   msg.supports_current_humidity = traits.get_supports_current_humidity();
   msg.supports_two_point_target_temperature = traits.get_supports_two_point_target_temperature();
   msg.supports_target_humidity = traits.get_supports_target_humidity();
+  msg.supports_action = traits.get_supports_action();
+  // Current feature flags and other supported parameters
+  msg.feature_flags = traits.get_feature_flags();
   msg.supported_modes = &traits.get_supported_modes_for_api_();
   msg.visual_min_temperature = traits.get_visual_min_temperature();
   msg.visual_max_temperature = traits.get_visual_max_temperature();
@@ -667,7 +675,6 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
   msg.visual_current_temperature_step = traits.get_visual_current_temperature_step();
   msg.visual_min_humidity = traits.get_visual_min_humidity();
   msg.visual_max_humidity =
traits.get_visual_max_humidity(); - msg.supports_action = traits.get_supports_action(); msg.supported_fan_modes = &traits.get_supported_fan_modes_for_api_(); msg.supported_custom_fan_modes = &traits.get_supported_custom_fan_modes_for_api_(); msg.supported_presets = &traits.get_supported_presets_for_api_(); @@ -1406,7 +1413,7 @@ bool APIConnection::send_hello_response(const HelloRequest &msg) { HelloResponse resp; resp.api_version_major = 1; - resp.api_version_minor = 12; + resp.api_version_minor = 13; // Send only the version string - the client only logs this for debugging and doesn't use it otherwise resp.set_server_info(ESPHOME_VERSION_REF); resp.set_name(StringRef(App.get_name())); diff --git a/esphome/components/api/api_pb2.cpp b/esphome/components/api/api_pb2.cpp index 70bcf082a6..ee25d4c02d 100644 --- a/esphome/components/api/api_pb2.cpp +++ b/esphome/components/api/api_pb2.cpp @@ -1185,6 +1185,7 @@ void ListEntitiesClimateResponse::encode(ProtoWriteBuffer buffer) const { #ifdef USE_DEVICES buffer.encode_uint32(26, this->device_id); #endif + buffer.encode_uint32(27, this->feature_flags); } void ListEntitiesClimateResponse::calculate_size(ProtoSize &size) const { size.add_length(1, this->object_id_ref_.size()); @@ -1239,6 +1240,7 @@ void ListEntitiesClimateResponse::calculate_size(ProtoSize &size) const { #ifdef USE_DEVICES size.add_uint32(2, this->device_id); #endif + size.add_uint32(2, this->feature_flags); } void ClimateStateResponse::encode(ProtoWriteBuffer buffer) const { buffer.encode_fixed32(1, this->key); diff --git a/esphome/components/api/api_pb2.h b/esphome/components/api/api_pb2.h index 20866850a9..fca3f546b5 100644 --- a/esphome/components/api/api_pb2.h +++ b/esphome/components/api/api_pb2.h @@ -1369,7 +1369,7 @@ class CameraImageRequest final : public ProtoDecodableMessage { class ListEntitiesClimateResponse final : public InfoResponseProtoMessage { public: static constexpr uint8_t MESSAGE_TYPE = 46; - static constexpr uint8_t ESTIMATED_SIZE = 145; + static constexpr uint8_t ESTIMATED_SIZE = 150; #ifdef HAS_PROTO_MESSAGE_DUMP const char *message_name() const override { return "list_entities_climate_response"; } #endif @@ -1390,6 +1390,7 @@ class ListEntitiesClimateResponse final : public InfoResponseProtoMessage { bool supports_target_humidity{false}; float visual_min_humidity{0.0f}; float visual_max_humidity{0.0f}; + uint32_t feature_flags{0}; void encode(ProtoWriteBuffer buffer) const override; void calculate_size(ProtoSize &size) const override; #ifdef HAS_PROTO_MESSAGE_DUMP diff --git a/esphome/components/api/api_pb2_dump.cpp b/esphome/components/api/api_pb2_dump.cpp index cf732e451b..e803125f53 100644 --- a/esphome/components/api/api_pb2_dump.cpp +++ b/esphome/components/api/api_pb2_dump.cpp @@ -1292,6 +1292,7 @@ void ListEntitiesClimateResponse::dump_to(std::string &out) const { #ifdef USE_DEVICES dump_field(out, "device_id", this->device_id); #endif + dump_field(out, "feature_flags", this->feature_flags); } void ClimateStateResponse::dump_to(std::string &out) const { MessageDumpHelper helper(out, "ClimateStateResponse"); diff --git a/esphome/components/climate/climate.cpp b/esphome/components/climate/climate.cpp index e7a454d459..f3c93ed44e 100644 --- a/esphome/components/climate/climate.cpp +++ b/esphome/components/climate/climate.cpp @@ -96,7 +96,8 @@ void ClimateCall::validate_() { } if (this->target_temperature_.has_value()) { auto target = *this->target_temperature_; - if (traits.get_supports_two_point_target_temperature()) { + if 
(traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | + CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) { ESP_LOGW(TAG, " Cannot set target temperature for climate device " "with two-point target temperature!"); this->target_temperature_.reset(); @@ -106,7 +107,8 @@ void ClimateCall::validate_() { } } if (this->target_temperature_low_.has_value() || this->target_temperature_high_.has_value()) { - if (!traits.get_supports_two_point_target_temperature()) { + if (!traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | + CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) { ESP_LOGW(TAG, " Cannot set low/high target temperature for this device!"); this->target_temperature_low_.reset(); this->target_temperature_high_.reset(); @@ -350,13 +352,14 @@ void Climate::save_state_() { state.mode = this->mode; auto traits = this->get_traits(); - if (traits.get_supports_two_point_target_temperature()) { + if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | + CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) { state.target_temperature_low = this->target_temperature_low; state.target_temperature_high = this->target_temperature_high; } else { state.target_temperature = this->target_temperature; } - if (traits.get_supports_target_humidity()) { + if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) { state.target_humidity = this->target_humidity; } if (traits.get_supports_fan_modes() && fan_mode.has_value()) { @@ -400,7 +403,7 @@ void Climate::publish_state() { auto traits = this->get_traits(); ESP_LOGD(TAG, " Mode: %s", LOG_STR_ARG(climate_mode_to_string(this->mode))); - if (traits.get_supports_action()) { + if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) { ESP_LOGD(TAG, " Action: %s", LOG_STR_ARG(climate_action_to_string(this->action))); } if (traits.get_supports_fan_modes() && this->fan_mode.has_value()) { @@ -418,19 +421,20 @@ void Climate::publish_state() { if (traits.get_supports_swing_modes()) { ESP_LOGD(TAG, " Swing Mode: %s", LOG_STR_ARG(climate_swing_mode_to_string(this->swing_mode))); } - if (traits.get_supports_current_temperature()) { + if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) { ESP_LOGD(TAG, " Current Temperature: %.2f°C", this->current_temperature); } - if (traits.get_supports_two_point_target_temperature()) { + if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | + CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) { ESP_LOGD(TAG, " Target Temperature: Low: %.2f°C High: %.2f°C", this->target_temperature_low, this->target_temperature_high); } else { ESP_LOGD(TAG, " Target Temperature: %.2f°C", this->target_temperature); } - if (traits.get_supports_current_humidity()) { + if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) { ESP_LOGD(TAG, " Current Humidity: %.0f%%", this->current_humidity); } - if (traits.get_supports_target_humidity()) { + if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) { ESP_LOGD(TAG, " Target Humidity: %.0f%%", this->target_humidity); } @@ -485,13 +489,14 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) { auto call = climate->make_call(); auto traits = climate->get_traits(); call.set_mode(this->mode); - if (traits.get_supports_two_point_target_temperature()) { + if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | + CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) { call.set_target_temperature_low(this->target_temperature_low); 
call.set_target_temperature_high(this->target_temperature_high); } else { call.set_target_temperature(this->target_temperature); } - if (traits.get_supports_target_humidity()) { + if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) { call.set_target_humidity(this->target_humidity); } if (traits.get_supports_fan_modes() || !traits.get_supported_custom_fan_modes().empty()) { @@ -508,13 +513,14 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) { void ClimateDeviceRestoreState::apply(Climate *climate) { auto traits = climate->get_traits(); climate->mode = this->mode; - if (traits.get_supports_two_point_target_temperature()) { + if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | + CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) { climate->target_temperature_low = this->target_temperature_low; climate->target_temperature_high = this->target_temperature_high; } else { climate->target_temperature = this->target_temperature; } - if (traits.get_supports_target_humidity()) { + if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) { climate->target_humidity = this->target_humidity; } if (traits.get_supports_fan_modes() && !this->uses_custom_fan_mode) { @@ -580,28 +586,30 @@ void Climate::dump_traits_(const char *tag) { " Target: %.1f", traits.get_visual_min_temperature(), traits.get_visual_max_temperature(), traits.get_visual_target_temperature_step()); - if (traits.get_supports_current_temperature()) { + if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) { ESP_LOGCONFIG(tag, " Current: %.1f", traits.get_visual_current_temperature_step()); } - if (traits.get_supports_target_humidity() || traits.get_supports_current_humidity()) { + if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY | + climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) { ESP_LOGCONFIG(tag, " - Min humidity: %.0f\n" " - Max humidity: %.0f", traits.get_visual_min_humidity(), traits.get_visual_max_humidity()); } - if (traits.get_supports_two_point_target_temperature()) { + if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | + CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) { ESP_LOGCONFIG(tag, " [x] Supports two-point target temperature"); } - if (traits.get_supports_current_temperature()) { + if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) { ESP_LOGCONFIG(tag, " [x] Supports current temperature"); } - if (traits.get_supports_target_humidity()) { + if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) { ESP_LOGCONFIG(tag, " [x] Supports target humidity"); } - if (traits.get_supports_current_humidity()) { + if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) { ESP_LOGCONFIG(tag, " [x] Supports current humidity"); } - if (traits.get_supports_action()) { + if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) { ESP_LOGCONFIG(tag, " [x] Supports action"); } if (!traits.get_supported_modes().empty()) { diff --git a/esphome/components/climate/climate_mode.h b/esphome/components/climate/climate_mode.h index 80efb4c048..faec5d2537 100644 --- a/esphome/components/climate/climate_mode.h +++ b/esphome/components/climate/climate_mode.h @@ -98,6 +98,21 @@ enum ClimatePreset : uint8_t { CLIMATE_PRESET_ACTIVITY = 7, }; +enum ClimateFeature : uint32_t { + // Reporting current temperature is supported + CLIMATE_SUPPORTS_CURRENT_TEMPERATURE = 1 << 0, + // Setting two target temperatures is supported (used in conjunction with 
CLIMATE_MODE_HEAT_COOL) + CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE = 1 << 1, + // Single-point mode is NOT supported (UI always displays two handles, setting 'target_temperature' is not supported) + CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE = 1 << 2, + // Reporting current humidity is supported + CLIMATE_SUPPORTS_CURRENT_HUMIDITY = 1 << 3, + // Setting a target humidity is supported + CLIMATE_SUPPORTS_TARGET_HUMIDITY = 1 << 4, + // Reporting current climate action is supported + CLIMATE_SUPPORTS_ACTION = 1 << 5, +}; + /// Convert the given ClimateMode to a human-readable string. const LogString *climate_mode_to_string(ClimateMode mode); diff --git a/esphome/components/climate/climate_traits.h b/esphome/components/climate/climate_traits.h index 8bd4714753..50c1e79ad2 100644 --- a/esphome/components/climate/climate_traits.h +++ b/esphome/components/climate/climate_traits.h @@ -21,48 +21,92 @@ namespace climate { * - Target Temperature * * All other properties and modes are optional and the integration must mark - * each of them as supported by setting the appropriate flag here. + * each of them as supported by setting the appropriate flag(s) here. * - * - supports current temperature - if the climate device supports reporting a current temperature - * - supports two point target temperature - if the climate device's target temperature should be - * split in target_temperature_low and target_temperature_high instead of just the single target_temperature + * - feature flags: see ClimateFeatures enum in climate_mode.h * - supports modes: * - auto mode (automatic control) * - cool mode (lowers current temperature) * - heat mode (increases current temperature) * - dry mode (removes humidity from air) * - fan mode (only turns on fan) - * - supports action - if the climate device supports reporting the active - * current action of the device with the action property. * - supports fan modes - optionally, if it has a fan which can be configured in different ways: * - on, off, auto, high, medium, low, middle, focus, diffuse, quiet * - supports swing modes - optionally, if it has a swing which can be configured in different ways: * - off, both, vertical, horizontal * * This class also contains static data for the climate device display: - * - visual min/max temperature - tells the frontend what range of temperatures the climate device - * should display (gauge min/max values) + * - visual min/max temperature/humidity - tells the frontend what range of temperature/humidity the + * climate device should display (gauge min/max values) * - temperature step - the step with which to increase/decrease target temperature. 
 *   This also affects with how many decimal places the temperature is shown
 */
 class ClimateTraits {
  public:
-  bool get_supports_current_temperature() const { return this->supports_current_temperature_; }
+  /// Get/set feature flags (see ClimateFeature enum in climate_mode.h)
+  uint32_t get_feature_flags() const { return this->feature_flags_; }
+  void add_feature_flags(uint32_t feature_flags) { this->feature_flags_ |= feature_flags; }
+  void clear_feature_flags(uint32_t feature_flags) { this->feature_flags_ &= ~feature_flags; }
+  bool has_feature_flags(uint32_t feature_flags) const { return this->feature_flags_ & feature_flags; }
+  void set_feature_flags(uint32_t feature_flags) { this->feature_flags_ = feature_flags; }
+
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_current_temperature() const {
+    return this->has_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
+  }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
   void set_supports_current_temperature(bool supports_current_temperature) {
-    this->supports_current_temperature_ = supports_current_temperature;
+    if (supports_current_temperature) {
+      this->add_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
+    } else {
+      this->clear_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
+    }
   }
-  bool get_supports_current_humidity() const { return this->supports_current_humidity_; }
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_current_humidity() const { return this->has_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY); }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
   void set_supports_current_humidity(bool supports_current_humidity) {
-    this->supports_current_humidity_ = supports_current_humidity;
+    if (supports_current_humidity) {
+      this->add_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
+    } else {
+      this->clear_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
+    }
   }
-  bool get_supports_two_point_target_temperature() const { return this->supports_two_point_target_temperature_; }
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_two_point_target_temperature() const {
+    return this->has_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
+  }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
   void set_supports_two_point_target_temperature(bool supports_two_point_target_temperature) {
-    this->supports_two_point_target_temperature_ = supports_two_point_target_temperature;
+    if (supports_two_point_target_temperature)
+    // Use CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE to mimic previous behavior
+    {
+      this->add_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
+    } else {
+      this->clear_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
+    }
   }
-  bool get_supports_target_humidity() const { return this->supports_target_humidity_; }
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_target_humidity() const { return this->has_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY); }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
   void set_supports_target_humidity(bool supports_target_humidity) {
-    this->supports_target_humidity_ = supports_target_humidity;
+    if (supports_target_humidity) {
+      this->add_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY);
+    } else {
+      this->clear_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY);
+    }
   }
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_action() const { return this->has_feature_flags(CLIMATE_SUPPORTS_ACTION); }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
+  void set_supports_action(bool supports_action) {
+    if (supports_action) {
+      this->add_feature_flags(CLIMATE_SUPPORTS_ACTION);
+    } else {
+      this->clear_feature_flags(CLIMATE_SUPPORTS_ACTION);
+    }
+  }
+
   void set_supported_modes(std::set<ClimateMode> modes) { this->supported_modes_ = std::move(modes); }
   void add_supported_mode(ClimateMode mode) { this->supported_modes_.insert(mode); }
   ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")
@@ -82,9 +126,6 @@ class ClimateTraits {
   bool supports_mode(ClimateMode mode) const { return this->supported_modes_.count(mode); }
   const std::set<ClimateMode> &get_supported_modes() const { return this->supported_modes_; }
 
-  void set_supports_action(bool supports_action) { this->supports_action_ = supports_action; }
-  bool get_supports_action() const { return this->supports_action_; }
-
   void set_supported_fan_modes(std::set<ClimateFanMode> modes) { this->supported_fan_modes_ = std::move(modes); }
   void add_supported_fan_mode(ClimateFanMode mode) { this->supported_fan_modes_.insert(mode); }
   void add_supported_custom_fan_mode(const std::string &mode) { this->supported_custom_fan_modes_.insert(mode); }
@@ -219,24 +260,20 @@ class ClimateTraits {
     }
   }
 
-  bool supports_current_temperature_{false};
-  bool supports_current_humidity_{false};
-  bool supports_two_point_target_temperature_{false};
-  bool supports_target_humidity_{false};
-  std::set<climate::ClimateMode> supported_modes_ = {climate::CLIMATE_MODE_OFF};
-  bool supports_action_{false};
-  std::set<climate::ClimateFanMode> supported_fan_modes_;
-  std::set<climate::ClimateSwingMode> supported_swing_modes_;
-  std::set<climate::ClimatePreset> supported_presets_;
-  std::set<std::string> supported_custom_fan_modes_;
-  std::set<std::string> supported_custom_presets_;
-
+  uint32_t feature_flags_{0};
   float visual_min_temperature_{10};
   float visual_max_temperature_{30};
   float visual_target_temperature_step_{0.1};
   float visual_current_temperature_step_{0.1};
   float visual_min_humidity_{30};
   float visual_max_humidity_{99};
+
+  std::set<climate::ClimateMode> supported_modes_ = {climate::CLIMATE_MODE_OFF};
+  std::set<climate::ClimateFanMode> supported_fan_modes_;
+  std::set<climate::ClimateSwingMode> supported_swing_modes_;
+  std::set<climate::ClimatePreset> supported_presets_;
+  std::set<std::string> supported_custom_fan_modes_;
+  std::set<std::string> supported_custom_presets_;
 };
 
 }  // namespace climate
diff --git a/esphome/components/thermostat/thermostat_climate.cpp b/esphome/components/thermostat/thermostat_climate.cpp
index e2db3ca5e1..784b3cfb50 100644
--- a/esphome/components/thermostat/thermostat_climate.cpp
+++ b/esphome/components/thermostat/thermostat_climate.cpp
@@ -241,9 +241,14 @@ void ThermostatClimate::control(const climate::ClimateCall &call) {
 
 climate::ClimateTraits ThermostatClimate::traits() {
   auto traits = climate::ClimateTraits();
-  traits.set_supports_current_temperature(true);
+
+  traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION | climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
+
+  if (this->supports_two_points_)
+    traits.add_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE);
+
   if (this->humidity_sensor_ != nullptr)
-    traits.set_supports_current_humidity(true);
+    traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
 
   if (this->supports_auto_)
     traits.add_supported_mode(climate::CLIMATE_MODE_AUTO);
@@
-294,9 +299,6 @@ climate::ClimateTraits ThermostatClimate::traits() { for (auto &it : this->custom_preset_config_) { traits.add_supported_custom_preset(it.first); } - - traits.set_supports_two_point_target_temperature(this->supports_two_points_); - traits.set_supports_action(true); return traits; } From 3e1c8f37c562399640abd6206a0ac8cdbea3f4e0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 17:16:28 -1000 Subject: [PATCH 05/31] [i2s_audio] Refactor to use CORE.data instead of module-level globals (#11223) --- esphome/components/i2s_audio/__init__.py | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/esphome/components/i2s_audio/__init__.py b/esphome/components/i2s_audio/__init__.py index 8ceff26d84..907429ee0e 100644 --- a/esphome/components/i2s_audio/__init__.py +++ b/esphome/components/i2s_audio/__init__.py @@ -143,7 +143,18 @@ def validate_mclk_divisible_by_3(config): return config -_use_legacy_driver = None +# Key for storing legacy driver setting in CORE.data +I2S_USE_LEGACY_DRIVER_KEY = "i2s_use_legacy_driver" + + +def _get_use_legacy_driver(): + """Get the legacy driver setting from CORE.data.""" + return CORE.data.get(I2S_USE_LEGACY_DRIVER_KEY) + + +def _set_use_legacy_driver(value: bool) -> None: + """Set the legacy driver setting in CORE.data.""" + CORE.data[I2S_USE_LEGACY_DRIVER_KEY] = value def i2s_audio_component_schema( @@ -209,17 +220,15 @@ async def register_i2s_audio_component(var, config): def validate_use_legacy(value): - global _use_legacy_driver # noqa: PLW0603 if CONF_USE_LEGACY in value: - if (_use_legacy_driver is not None) and ( - _use_legacy_driver != value[CONF_USE_LEGACY] - ): + existing_value = _get_use_legacy_driver() + if (existing_value is not None) and (existing_value != value[CONF_USE_LEGACY]): raise cv.Invalid( f"All i2s_audio components must set {CONF_USE_LEGACY} to the same value." ) if (not value[CONF_USE_LEGACY]) and (CORE.using_arduino): raise cv.Invalid("Arduino supports only the legacy i2s driver") - _use_legacy_driver = value[CONF_USE_LEGACY] + _set_use_legacy_driver(value[CONF_USE_LEGACY]) return value @@ -249,7 +258,8 @@ def _final_validate(_): def use_legacy(): - return not (CORE.using_esp_idf and not _use_legacy_driver) + legacy_driver = _get_use_legacy_driver() + return not (CORE.using_esp_idf and not legacy_driver) FINAL_VALIDATE_SCHEMA = _final_validate From 2b0b82b2fb66b1aefdf495f22c5c0ef92c53c28e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 15 Oct 2025 17:17:16 -1000 Subject: [PATCH 06/31] [esp32_ble] Refactor to use CORE.data instead of module-level globals (#11222) --- esphome/components/esp32_ble/__init__.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/esphome/components/esp32_ble/__init__.py b/esphome/components/esp32_ble/__init__.py index 246ebf0729..411c2add71 100644 --- a/esphome/components/esp32_ble/__init__.py +++ b/esphome/components/esp32_ble/__init__.py @@ -108,8 +108,13 @@ class BTLoggers(Enum): """ESP32 WiFi provisioning over Bluetooth""" -# Set to track which loggers are needed by components -_required_loggers: set[BTLoggers] = set() +# Key for storing required loggers in CORE.data +ESP32_BLE_REQUIRED_LOGGERS_KEY = "esp32_ble_required_loggers" + + +def _get_required_loggers() -> set[BTLoggers]: + """Get the set of required Bluetooth loggers from CORE.data.""" + return CORE.data.setdefault(ESP32_BLE_REQUIRED_LOGGERS_KEY, set()) # Dataclass for handler registration counts @@ -170,12 +175,13 @@ def register_bt_logger(*loggers: BTLoggers) -> None: Args: *loggers: One or more BTLoggers enum members """ + required_loggers = _get_required_loggers() for logger in loggers: if not isinstance(logger, BTLoggers): raise TypeError( f"Logger must be a BTLoggers enum member, got {type(logger)}" ) - _required_loggers.add(logger) + required_loggers.add(logger) CONF_BLE_ID = "ble_id" @@ -488,8 +494,9 @@ async def to_code(config): # Apply logger settings if log disabling is enabled if config.get(CONF_DISABLE_BT_LOGS, False): # Disable all Bluetooth loggers that are not required + required_loggers = _get_required_loggers() for logger in BTLoggers: - if logger not in _required_loggers: + if logger not in required_loggers: add_idf_sdkconfig_option(f"{logger.value}_NONE", True) # Set BLE connection establishment timeout to match aioesphomeapi/bleak-retry-connector From 18062d154f236fdc714f4a94d4bde3df068a2ccc Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 15 Oct 2025 17:18:30 -1000 Subject: [PATCH 07/31] [esp32_ble_tracker] Refactor to use CORE.data instead of module-level globals (#11220) --- .../components/esp32_ble_tracker/__init__.py | 50 ++++++++++++------- 1 file changed, 32 insertions(+), 18 deletions(-) diff --git a/esphome/components/esp32_ble_tracker/__init__.py b/esphome/components/esp32_ble_tracker/__init__.py index 5910be67af..4e25434aad 100644 --- a/esphome/components/esp32_ble_tracker/__init__.py +++ b/esphome/components/esp32_ble_tracker/__init__.py @@ -60,11 +60,21 @@ class RegistrationCounts: clients: int = 0 -# Set to track which features are needed by components -_required_features: set[BLEFeatures] = set() +# CORE.data keys for state management +ESP32_BLE_TRACKER_REQUIRED_FEATURES_KEY = "esp32_ble_tracker_required_features" +ESP32_BLE_TRACKER_REGISTRATION_COUNTS_KEY = "esp32_ble_tracker_registration_counts" -# Track registration counts for StaticVector sizing -_registration_counts = RegistrationCounts() + +def _get_required_features() -> set[BLEFeatures]: + """Get the set of required BLE features from CORE.data.""" + return CORE.data.setdefault(ESP32_BLE_TRACKER_REQUIRED_FEATURES_KEY, set()) + + +def _get_registration_counts() -> RegistrationCounts: + """Get the registration counts from CORE.data.""" + return CORE.data.setdefault( + ESP32_BLE_TRACKER_REGISTRATION_COUNTS_KEY, RegistrationCounts() + ) def register_ble_features(features: set[BLEFeatures]) -> None: @@ -73,7 +83,7 @@ def register_ble_features(features: set[BLEFeatures]) -> None: Args: features: Set of BLEFeatures enum members """ - _required_features.update(features) + _get_required_features().update(features) esp32_ble_tracker_ns = cg.esphome_ns.namespace("esp32_ble_tracker") @@ -267,15 +277,17 @@ async def to_code(config): ): register_ble_features({BLEFeatures.ESP_BT_DEVICE}) + registration_counts = _get_registration_counts() + for conf in config.get(CONF_ON_BLE_ADVERTISE, []): - _registration_counts.listeners += 1 + registration_counts.listeners += 1 trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var) if CONF_MAC_ADDRESS in conf: addr_list = [it.as_hex for it in conf[CONF_MAC_ADDRESS]] cg.add(trigger.set_addresses(addr_list)) await automation.build_automation(trigger, [(ESPBTDeviceConstRef, "x")], conf) for conf in config.get(CONF_ON_BLE_SERVICE_DATA_ADVERTISE, []): - _registration_counts.listeners += 1 + registration_counts.listeners += 1 trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var) if len(conf[CONF_SERVICE_UUID]) == len(bt_uuid16_format): cg.add(trigger.set_service_uuid16(as_hex(conf[CONF_SERVICE_UUID]))) @@ -288,7 +300,7 @@ async def to_code(config): cg.add(trigger.set_address(conf[CONF_MAC_ADDRESS].as_hex)) await automation.build_automation(trigger, [(adv_data_t_const_ref, "x")], conf) for conf in config.get(CONF_ON_BLE_MANUFACTURER_DATA_ADVERTISE, []): - _registration_counts.listeners += 1 + registration_counts.listeners += 1 trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var) if len(conf[CONF_MANUFACTURER_ID]) == len(bt_uuid16_format): cg.add(trigger.set_manufacturer_uuid16(as_hex(conf[CONF_MANUFACTURER_ID]))) @@ -301,7 +313,7 @@ async def to_code(config): cg.add(trigger.set_address(conf[CONF_MAC_ADDRESS].as_hex)) await automation.build_automation(trigger, [(adv_data_t_const_ref, "x")], conf) for conf in config.get(CONF_ON_SCAN_END, []): - _registration_counts.listeners += 1 + registration_counts.listeners += 1 trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var) await automation.build_automation(trigger, 
[], conf) @@ -331,19 +343,21 @@ async def to_code(config): @coroutine_with_priority(CoroPriority.FINAL) async def _add_ble_features(): # Add feature-specific defines based on what's needed - if BLEFeatures.ESP_BT_DEVICE in _required_features: + required_features = _get_required_features() + if BLEFeatures.ESP_BT_DEVICE in required_features: cg.add_define("USE_ESP32_BLE_DEVICE") cg.add_define("USE_ESP32_BLE_UUID") # Add defines for StaticVector sizing based on registration counts # Only define if count > 0 to avoid allocating unnecessary memory - if _registration_counts.listeners > 0: + registration_counts = _get_registration_counts() + if registration_counts.listeners > 0: cg.add_define( - "ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT", _registration_counts.listeners + "ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT", registration_counts.listeners ) - if _registration_counts.clients > 0: + if registration_counts.clients > 0: cg.add_define( - "ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT", _registration_counts.clients + "ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT", registration_counts.clients ) @@ -395,7 +409,7 @@ async def register_ble_device( var: cg.SafeExpType, config: ConfigType ) -> cg.SafeExpType: register_ble_features({BLEFeatures.ESP_BT_DEVICE}) - _registration_counts.listeners += 1 + _get_registration_counts().listeners += 1 paren = await cg.get_variable(config[CONF_ESP32_BLE_ID]) cg.add(paren.register_listener(var)) return var @@ -403,7 +417,7 @@ async def register_ble_device( async def register_client(var: cg.SafeExpType, config: ConfigType) -> cg.SafeExpType: register_ble_features({BLEFeatures.ESP_BT_DEVICE}) - _registration_counts.clients += 1 + _get_registration_counts().clients += 1 paren = await cg.get_variable(config[CONF_ESP32_BLE_ID]) cg.add(paren.register_client(var)) return var @@ -417,7 +431,7 @@ async def register_raw_ble_device( This does NOT register the ESP_BT_DEVICE feature, meaning ESPBTDevice will not be compiled in if this is the only registration method used. """ - _registration_counts.listeners += 1 + _get_registration_counts().listeners += 1 paren = await cg.get_variable(config[CONF_ESP32_BLE_ID]) cg.add(paren.register_listener(var)) return var @@ -431,7 +445,7 @@ async def register_raw_client( This does NOT register the ESP_BT_DEVICE feature, meaning ESPBTDevice will not be compiled in if this is the only registration method used. """ - _registration_counts.clients += 1 + _get_registration_counts().clients += 1 paren = await cg.get_variable(config[CONF_ESP32_BLE_ID]) cg.add(paren.register_client(var)) return var From 6943b1d985adee3f94158b318ed166b8387bd9ff Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 15 Oct 2025 17:22:08 -1000 Subject: [PATCH 08/31] [api] Use FixedVector for ExecuteServiceRequest/Argument arrays to eliminate reallocations (#11270) --- esphome/components/api/api.proto | 10 ++-- esphome/components/api/api_pb2.cpp | 16 ++++++ esphome/components/api/api_pb2.h | 12 ++-- esphome/components/api/proto.cpp | 71 ++++++++++++++++++++++-- esphome/components/api/proto.h | 24 +++++++- esphome/components/api/user_services.cpp | 8 +-- esphome/components/api/user_services.h | 2 +- script/api_protobuf/api_protobuf.py | 36 ++++++++++-- 8 files changed, 153 insertions(+), 26 deletions(-) diff --git a/esphome/components/api/api.proto b/esphome/components/api/api.proto index 30e3cfa056..753adc3592 100644 --- a/esphome/components/api/api.proto +++ b/esphome/components/api/api.proto @@ -876,10 +876,10 @@ message ExecuteServiceArgument { string string_ = 4; // ESPHome 1.14 (api v1.3) make int a signed value sint32 int_ = 5; - repeated bool bool_array = 6 [packed=false]; - repeated sint32 int_array = 7 [packed=false]; - repeated float float_array = 8 [packed=false]; - repeated string string_array = 9; + repeated bool bool_array = 6 [packed=false, (fixed_vector) = true]; + repeated sint32 int_array = 7 [packed=false, (fixed_vector) = true]; + repeated float float_array = 8 [packed=false, (fixed_vector) = true]; + repeated string string_array = 9 [(fixed_vector) = true]; } message ExecuteServiceRequest { option (id) = 42; @@ -888,7 +888,7 @@ message ExecuteServiceRequest { option (ifdef) = "USE_API_SERVICES"; fixed32 key = 1; - repeated ExecuteServiceArgument args = 2; + repeated ExecuteServiceArgument args = 2 [(fixed_vector) = true]; } // ==================== CAMERA ==================== diff --git a/esphome/components/api/api_pb2.cpp b/esphome/components/api/api_pb2.cpp index ee25d4c02d..37bcf5d8a0 100644 --- a/esphome/components/api/api_pb2.cpp +++ b/esphome/components/api/api_pb2.cpp @@ -1064,6 +1064,17 @@ bool ExecuteServiceArgument::decode_32bit(uint32_t field_id, Proto32Bit value) { } return true; } +void ExecuteServiceArgument::decode(const uint8_t *buffer, size_t length) { + uint32_t count_bool_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 6); + this->bool_array.init(count_bool_array); + uint32_t count_int_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 7); + this->int_array.init(count_int_array); + uint32_t count_float_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 8); + this->float_array.init(count_float_array); + uint32_t count_string_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 9); + this->string_array.init(count_string_array); + ProtoDecodableMessage::decode(buffer, length); +} bool ExecuteServiceRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) { switch (field_id) { case 2: @@ -1085,6 +1096,11 @@ bool ExecuteServiceRequest::decode_32bit(uint32_t field_id, Proto32Bit value) { } return true; } +void ExecuteServiceRequest::decode(const uint8_t *buffer, size_t length) { + uint32_t count_args = ProtoDecodableMessage::count_repeated_field(buffer, length, 2); + this->args.init(count_args); + ProtoDecodableMessage::decode(buffer, length); +} #endif #ifdef USE_CAMERA void ListEntitiesCameraResponse::encode(ProtoWriteBuffer buffer) const { diff --git a/esphome/components/api/api_pb2.h b/esphome/components/api/api_pb2.h index fca3f546b5..5603204801 100644 --- a/esphome/components/api/api_pb2.h +++ b/esphome/components/api/api_pb2.h @@ -1279,10 +1279,11 @@ class 
ExecuteServiceArgument final : public ProtoDecodableMessage { float float_{0.0f}; std::string string_{}; int32_t int_{0}; - std::vector<bool> bool_array{}; - std::vector<int32_t> int_array{}; - std::vector<float> float_array{}; - std::vector<std::string> string_array{}; + FixedVector<bool> bool_array{}; + FixedVector<int32_t> int_array{}; + FixedVector<float> float_array{}; + FixedVector<std::string> string_array{}; + void decode(const uint8_t *buffer, size_t length) override; #ifdef HAS_PROTO_MESSAGE_DUMP void dump_to(std::string &out) const override; #endif @@ -1300,7 +1301,8 @@ class ExecuteServiceRequest final : public ProtoDecodableMessage { const char *message_name() const override { return "execute_service_request"; } #endif uint32_t key{0}; - std::vector<ExecuteServiceArgument> args{}; + FixedVector<ExecuteServiceArgument> args{}; + void decode(const uint8_t *buffer, size_t length) override; #ifdef HAS_PROTO_MESSAGE_DUMP void dump_to(std::string &out) const override; #endif diff --git a/esphome/components/api/proto.cpp b/esphome/components/api/proto.cpp index afda5d32ba..4f0d0846d7 100644 --- a/esphome/components/api/proto.cpp +++ b/esphome/components/api/proto.cpp @@ -7,6 +7,69 @@ namespace esphome::api { static const char *const TAG = "api.proto"; +uint32_t ProtoDecodableMessage::count_repeated_field(const uint8_t *buffer, size_t length, uint32_t target_field_id) { + uint32_t count = 0; + const uint8_t *ptr = buffer; + const uint8_t *end = buffer + length; + + while (ptr < end) { + uint32_t consumed; + + // Parse field header (tag) + auto res = ProtoVarInt::parse(ptr, end - ptr, &consumed); + if (!res.has_value()) { + break; // Invalid data, stop counting + } + + uint32_t tag = res->as_uint32(); + uint32_t field_type = tag & WIRE_TYPE_MASK; + uint32_t field_id = tag >> 3; + ptr += consumed; + + // Count if this is the target field + if (field_id == target_field_id) { + count++; + } + + // Skip field data based on wire type + switch (field_type) { + case WIRE_TYPE_VARINT: { // VarInt - parse and skip + res = ProtoVarInt::parse(ptr, end - ptr, &consumed); + if (!res.has_value()) { + return count; // Invalid data, return what we have + } + ptr += consumed; + break; + } + case WIRE_TYPE_LENGTH_DELIMITED: { // Length-delimited - parse length and skip data + res = ProtoVarInt::parse(ptr, end - ptr, &consumed); + if (!res.has_value()) { + return count; + } + uint32_t field_length = res->as_uint32(); + ptr += consumed; + if (ptr + field_length > end) { + return count; // Out of bounds + } + ptr += field_length; + break; + } + case WIRE_TYPE_FIXED32: { // 32-bit - skip 4 bytes + if (ptr + 4 > end) { + return count; + } + ptr += 4; + break; + } + default: + // Unknown wire type, can't continue + return count; + } + } + + return count; +} + void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) { const uint8_t *ptr = buffer; const uint8_t *end = buffer + length; @@ -22,12 +85,12 @@ void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) { } uint32_t tag = res->as_uint32(); - uint32_t field_type = tag & 0b111; + uint32_t field_type = tag & WIRE_TYPE_MASK; uint32_t field_id = tag >> 3; ptr += consumed; switch (field_type) { - case 0: { // VarInt + case WIRE_TYPE_VARINT: { // VarInt res = ProtoVarInt::parse(ptr, end - ptr, &consumed); if (!res.has_value()) { ESP_LOGV(TAG, "Invalid VarInt at offset %ld", (long) (ptr - buffer)); @@ -39,7 +102,7 @@ ptr += consumed; break; } - case 2: { // Length-delimited + case WIRE_TYPE_LENGTH_DELIMITED: { // Length-delimited res = ProtoVarInt::parse(ptr, end - ptr,
&consumed); if (!res.has_value()) { ESP_LOGV(TAG, "Invalid Length Delimited at offset %ld", (long) (ptr - buffer)); @@ -57,7 +120,7 @@ void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) { ptr += field_length; break; } - case 5: { // 32-bit + case WIRE_TYPE_FIXED32: { // 32-bit if (ptr + 4 > end) { ESP_LOGV(TAG, "Out-of-bounds Fixed32-bit at offset %ld", (long) (ptr - buffer)); return; diff --git a/esphome/components/api/proto.h b/esphome/components/api/proto.h index a6a09bf7c5..e7585924a5 100644 --- a/esphome/components/api/proto.h +++ b/esphome/components/api/proto.h @@ -15,6 +15,13 @@ namespace esphome::api { +// Protocol Buffer wire type constants +// See https://protobuf.dev/programming-guides/encoding/#structure +constexpr uint8_t WIRE_TYPE_VARINT = 0; // int32, int64, uint32, uint64, sint32, sint64, bool, enum +constexpr uint8_t WIRE_TYPE_LENGTH_DELIMITED = 2; // string, bytes, embedded messages, packed repeated fields +constexpr uint8_t WIRE_TYPE_FIXED32 = 5; // fixed32, sfixed32, float +constexpr uint8_t WIRE_TYPE_MASK = 0b111; // Mask to extract wire type from tag + // Helper functions for ZigZag encoding/decoding inline constexpr uint32_t encode_zigzag32(int32_t value) { return (static_cast(value) << 1) ^ (static_cast(value >> 31)); @@ -241,7 +248,7 @@ class ProtoWriteBuffer { * Following https://protobuf.dev/programming-guides/encoding/#structure */ void encode_field_raw(uint32_t field_id, uint32_t type) { - uint32_t val = (field_id << 3) | (type & 0b111); + uint32_t val = (field_id << 3) | (type & WIRE_TYPE_MASK); this->encode_varint_raw(val); } void encode_string(uint32_t field_id, const char *string, size_t len, bool force = false) { @@ -354,7 +361,18 @@ class ProtoMessage { // Base class for messages that support decoding class ProtoDecodableMessage : public ProtoMessage { public: - void decode(const uint8_t *buffer, size_t length); + virtual void decode(const uint8_t *buffer, size_t length); + + /** + * Count occurrences of a repeated field in a protobuf buffer. + * This is a lightweight scan that only parses tags and skips field data. 
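+ *
+ * Usage sketch (this mirrors the generated ExecuteServiceRequest::decode() in
+ * api_pb2.cpp above, where field id 2 is the repeated `args` field):
+ *
+ *   uint32_t count = ProtoDecodableMessage::count_repeated_field(buffer, length, 2);
+ *   this->args.init(count);                        // size the FixedVector exactly once
+ *   ProtoDecodableMessage::decode(buffer, length); // the normal decode pass then fills it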
+ * + * @param buffer Pointer to the protobuf buffer + * @param length Length of the buffer in bytes + * @param target_field_id The field ID to count + * @return Number of times the field appears in the buffer + */ + static uint32_t count_repeated_field(const uint8_t *buffer, size_t length, uint32_t target_field_id); protected: virtual bool decode_varint(uint32_t field_id, ProtoVarInt value) { return false; } @@ -482,7 +500,7 @@ class ProtoSize { * @return The number of bytes needed to encode the field ID and wire type */ static constexpr uint32_t field(uint32_t field_id, uint32_t type) { - uint32_t tag = (field_id << 3) | (type & 0b111); + uint32_t tag = (field_id << 3) | (type & WIRE_TYPE_MASK); return varint(tag); } diff --git a/esphome/components/api/user_services.cpp b/esphome/components/api/user_services.cpp index 27b30eb332..3cbf2ab5f9 100644 --- a/esphome/components/api/user_services.cpp +++ b/esphome/components/api/user_services.cpp @@ -12,16 +12,16 @@ template<> int32_t get_execute_arg_value<int32_t>(const ExecuteServiceArgument & template<> float get_execute_arg_value<float>(const ExecuteServiceArgument &arg) { return arg.float_; } template<> std::string get_execute_arg_value<std::string>(const ExecuteServiceArgument &arg) { return arg.string_; } template<> std::vector<bool> get_execute_arg_value<std::vector<bool>>(const ExecuteServiceArgument &arg) { - return arg.bool_array; + return std::vector<bool>(arg.bool_array.begin(), arg.bool_array.end()); } template<> std::vector<int32_t> get_execute_arg_value<std::vector<int32_t>>(const ExecuteServiceArgument &arg) { - return arg.int_array; + return std::vector<int32_t>(arg.int_array.begin(), arg.int_array.end()); } template<> std::vector<float> get_execute_arg_value<std::vector<float>>(const ExecuteServiceArgument &arg) { - return arg.float_array; + return std::vector<float>(arg.float_array.begin(), arg.float_array.end()); } template<> std::vector<std::string> get_execute_arg_value<std::vector<std::string>>(const ExecuteServiceArgument &arg) { - return arg.string_array; + return std::vector<std::string>(arg.string_array.begin(), arg.string_array.end()); } template<> enums::ServiceArgType to_service_arg_type<bool>() { return enums::SERVICE_ARG_TYPE_BOOL; } diff --git a/esphome/components/api/user_services.h b/esphome/components/api/user_services.h index 29843a2f78..9ca5e1093e 100644 --- a/esphome/components/api/user_services.h +++ b/esphome/components/api/user_services.h @@ -55,7 +55,7 @@ template<typename... Ts> class UserServiceBase : public UserServiceDescriptor { protected: virtual void execute(Ts...
x) = 0; - template void execute_(const std::vector &args, seq type) { + template void execute_(const ArgsContainer &args, seq type) { this->execute((get_execute_arg_value(args[S]))...); } diff --git a/script/api_protobuf/api_protobuf.py b/script/api_protobuf/api_protobuf.py index 9a55f1d136..4936434fc2 100755 --- a/script/api_protobuf/api_protobuf.py +++ b/script/api_protobuf/api_protobuf.py @@ -11,6 +11,7 @@ from typing import Any import aioesphomeapi.api_options_pb2 as pb import google.protobuf.descriptor_pb2 as descriptor +from google.protobuf.descriptor_pb2 import FieldDescriptorProto class WireType(IntEnum): @@ -148,7 +149,7 @@ class TypeInfo(ABC): @property def repeated(self) -> bool: """Check if the field is repeated.""" - return self._field.label == 3 + return self._field.label == FieldDescriptorProto.LABEL_REPEATED @property def wire_type(self) -> WireType: @@ -337,7 +338,7 @@ def create_field_type_info( needs_encode: bool = True, ) -> TypeInfo: """Create the appropriate TypeInfo instance for a field, handling repeated fields and custom options.""" - if field.label == 3: # repeated + if field.label == FieldDescriptorProto.LABEL_REPEATED: # Check if this repeated field has fixed_array_with_length_define option if ( fixed_size := get_field_opt(field, pb.fixed_array_with_length_define) @@ -1879,6 +1880,9 @@ def build_message_type( ) public_content.append("#endif") + # Collect fixed_vector fields for custom decode generation + fixed_vector_fields = [] + for field in desc.field: # Skip deprecated fields completely if field.options.deprecated: @@ -1887,7 +1891,7 @@ def build_message_type( # Validate that fixed_array_size is only used in encode-only messages if ( needs_decode - and field.label == 3 + and field.label == FieldDescriptorProto.LABEL_REPEATED and get_field_opt(field, pb.fixed_array_size) is not None ): raise ValueError( @@ -1900,7 +1904,7 @@ def build_message_type( # Validate that fixed_array_with_length_define is only used in encode-only messages if ( needs_decode - and field.label == 3 + and field.label == FieldDescriptorProto.LABEL_REPEATED and get_field_opt(field, pb.fixed_array_with_length_define) is not None ): raise ValueError( @@ -1910,6 +1914,14 @@ def build_message_type( f"since we cannot trust or control the number of items received from clients." 
) + # Collect fixed_vector repeated fields for custom decode generation + if ( + needs_decode + and field.label == FieldDescriptorProto.LABEL_REPEATED + and get_field_opt(field, pb.fixed_vector, False) + ): + fixed_vector_fields.append((field.name, field.number)) + ti = create_field_type_info(field, needs_decode, needs_encode) # Skip field declarations for fields that are in the base class @@ -2018,6 +2030,22 @@ def build_message_type( prot = "bool decode_64bit(uint32_t field_id, Proto64Bit value) override;" protected_content.insert(0, prot) + # Generate custom decode() override for messages with FixedVector fields + if fixed_vector_fields: + # Generate the decode() implementation in cpp + o = f"void {desc.name}::decode(const uint8_t *buffer, size_t length) {{\n" + # Count and init each FixedVector field + for field_name, field_number in fixed_vector_fields: + o += f" uint32_t count_{field_name} = ProtoDecodableMessage::count_repeated_field(buffer, length, {field_number});\n" + o += f" this->{field_name}.init(count_{field_name});\n" + # Call parent decode to populate the fields + o += " ProtoDecodableMessage::decode(buffer, length);\n" + o += "}\n" + cpp += o + # Generate the decode() declaration in header (public method) + prot = "void decode(const uint8_t *buffer, size_t length) override;" + public_content.append(prot) + # Only generate encode method if this message needs encoding and has fields if needs_encode and encode: o = f"void {desc.name}::encode(ProtoWriteBuffer buffer) const {{" From f2e0a412db1e4477b915160a0b3b7fe3eff9e46c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 17:23:20 -1000 Subject: [PATCH 09/31] [substitutions] Fix AttributeError when using packages with substitutions (#11274) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- esphome/components/substitutions/__init__.py | 10 +- esphome/config.py | 9 +- esphome/config_helpers.py | 26 ++- tests/unit_tests/test_substitutions.py | 204 ++++++++++++++++++- 4 files changed, 236 insertions(+), 13 deletions(-) diff --git a/esphome/components/substitutions/__init__.py b/esphome/components/substitutions/__init__.py index 1a1736aed1..e6bcdc063a 100644 --- a/esphome/components/substitutions/__init__.py +++ b/esphome/components/substitutions/__init__.py @@ -1,7 +1,7 @@ import logging from esphome import core -from esphome.config_helpers import Extend, Remove, merge_config +from esphome.config_helpers import Extend, Remove, merge_config, merge_dicts_ordered import esphome.config_validation as cv from esphome.const import CONF_SUBSTITUTIONS, VALID_SUBSTITUTIONS_CHARACTERS from esphome.yaml_util import ESPHomeDataBase, ESPLiteralValue, make_data_base @@ -170,10 +170,10 @@ def do_substitution_pass(config, command_line_substitutions, ignore_missing=Fals return # Merge substitutions in config, overriding with substitutions coming from command line: - substitutions = { - **config.get(CONF_SUBSTITUTIONS, {}), - **(command_line_substitutions or {}), - } + # Use merge_dicts_ordered to preserve OrderedDict type for move_to_end() + substitutions = merge_dicts_ordered( + config.get(CONF_SUBSTITUTIONS, {}), command_line_substitutions or {} + ) with cv.prepend_path("substitutions"): if not isinstance(substitutions, dict): raise cv.Invalid( diff --git a/esphome/config.py b/esphome/config.py index 10a5733575..6adecb5c65 100644 --- a/esphome/config.py +++ b/esphome/config.py @@ -12,7 +12,7 @@ from typing import Any import voluptuous as vol from esphome import core, loader, pins, yaml_util -from 
esphome.config_helpers import Extend, Remove +from esphome.config_helpers import Extend, Remove, merge_dicts_ordered import esphome.config_validation as cv from esphome.const import ( CONF_ESPHOME, @@ -922,10 +922,9 @@ def validate_config( if CONF_SUBSTITUTIONS in config or command_line_substitutions: from esphome.components import substitutions - result[CONF_SUBSTITUTIONS] = { - **(config.get(CONF_SUBSTITUTIONS) or {}), - **command_line_substitutions, - } + result[CONF_SUBSTITUTIONS] = merge_dicts_ordered( + config.get(CONF_SUBSTITUTIONS) or {}, command_line_substitutions + ) result.add_output_path([CONF_SUBSTITUTIONS], CONF_SUBSTITUTIONS) try: substitutions.do_substitution_pass(config, command_line_substitutions) diff --git a/esphome/config_helpers.py b/esphome/config_helpers.py index 00cd8f9818..88cfa49fdc 100644 --- a/esphome/config_helpers.py +++ b/esphome/config_helpers.py @@ -10,6 +10,7 @@ from esphome.const import ( PlatformFramework, ) from esphome.core import CORE +from esphome.util import OrderedDict # Pre-build lookup map from (platform, framework) tuples to PlatformFramework enum _PLATFORM_FRAMEWORK_LOOKUP = { @@ -17,6 +18,25 @@ _PLATFORM_FRAMEWORK_LOOKUP = { } +def merge_dicts_ordered(*dicts: dict) -> OrderedDict: + """Merge multiple dicts into an OrderedDict, preserving key order. + + This is a helper to ensure that dictionary merging preserves OrderedDict type, + which is important for operations like move_to_end(). + + Args: + *dicts: Variable number of dictionaries to merge (later dicts override earlier ones) + + Returns: + OrderedDict with merged contents + """ + result = OrderedDict() + for d in dicts: + if d: + result.update(d) + return result + + class Extend: def __init__(self, value): self.value = value @@ -60,7 +80,11 @@ def merge_config(full_old, full_new): if isinstance(new, dict): if not isinstance(old, dict): return new - res = old.copy() + # Preserve OrderedDict type by copying to OrderedDict if either input is OrderedDict + if isinstance(old, OrderedDict) or isinstance(new, OrderedDict): + res = OrderedDict(old) + else: + res = old.copy() for k, v in new.items(): if isinstance(v, Remove) and k in old: del res[k] diff --git a/tests/unit_tests/test_substitutions.py b/tests/unit_tests/test_substitutions.py index dd419aba9c..59396a4a83 100644 --- a/tests/unit_tests/test_substitutions.py +++ b/tests/unit_tests/test_substitutions.py @@ -2,9 +2,12 @@ import glob import logging from pathlib import Path -from esphome import yaml_util +from esphome import config as config_module, yaml_util from esphome.components import substitutions -from esphome.const import CONF_PACKAGES +from esphome.config_helpers import merge_config +from esphome.const import CONF_PACKAGES, CONF_SUBSTITUTIONS +from esphome.core import CORE +from esphome.util import OrderedDict _LOGGER = logging.getLogger(__name__) @@ -118,3 +121,200 @@ def test_substitutions_fixtures(fixture_path): if DEV_MODE: _LOGGER.error("Tests passed, but Dev mode is enabled.") assert not DEV_MODE # make sure DEV_MODE is disabled after you are finished. + + +def test_substitutions_with_command_line_maintains_ordered_dict() -> None: + """Test that substitutions remain an OrderedDict when command line substitutions are provided, + and that move_to_end() can be called successfully. + + This is a regression test for https://github.com/esphome/esphome/issues/11182 + where the config would become a regular dict and fail when move_to_end() was called. 
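+
+ A minimal sketch of the old failure mode (names here are illustrative only):
+
+ config = merge_config(base_cfg, package_cfg)  # could yield a plain dict
+ config.move_to_end(CONF_SUBSTITUTIONS, last=False)
+ # -> AttributeError: 'dict' object has no attribute 'move_to_end'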
+ """ + # Create an OrderedDict config with substitutions + config = OrderedDict() + config["esphome"] = {"name": "test"} + config[CONF_SUBSTITUTIONS] = {"var1": "value1", "var2": "value2"} + config["other_key"] = "other_value" + + # Command line substitutions that should override + command_line_subs = {"var2": "override", "var3": "new_value"} + + # Call do_substitution_pass with command line substitutions + substitutions.do_substitution_pass(config, command_line_subs) + + # Verify that config is still an OrderedDict + assert isinstance(config, OrderedDict), "Config should remain an OrderedDict" + + # Verify substitutions are at the beginning (move_to_end with last=False) + keys = list(config.keys()) + assert keys[0] == CONF_SUBSTITUTIONS, "Substitutions should be first key" + + # Verify substitutions were properly merged + assert config[CONF_SUBSTITUTIONS]["var1"] == "value1" + assert config[CONF_SUBSTITUTIONS]["var2"] == "override" + assert config[CONF_SUBSTITUTIONS]["var3"] == "new_value" + + # Verify config[CONF_SUBSTITUTIONS] is also an OrderedDict + assert isinstance(config[CONF_SUBSTITUTIONS], OrderedDict), ( + "Substitutions should be an OrderedDict" + ) + + +def test_substitutions_without_command_line_maintains_ordered_dict() -> None: + """Test that substitutions work correctly without command line substitutions.""" + config = OrderedDict() + config["esphome"] = {"name": "test"} + config[CONF_SUBSTITUTIONS] = {"var1": "value1"} + config["other_key"] = "other_value" + + # Call without command line substitutions + substitutions.do_substitution_pass(config, None) + + # Verify that config is still an OrderedDict + assert isinstance(config, OrderedDict), "Config should remain an OrderedDict" + + # Verify substitutions are at the beginning + keys = list(config.keys()) + assert keys[0] == CONF_SUBSTITUTIONS, "Substitutions should be first key" + + +def test_substitutions_after_merge_config_maintains_ordered_dict() -> None: + """Test that substitutions work after merge_config (packages scenario). + + This is a regression test for https://github.com/esphome/esphome/issues/11182 + where using packages would cause config to become a regular dict, breaking move_to_end(). + """ + # Simulate what happens with packages - merge two OrderedDict configs + base_config = OrderedDict() + base_config["esphome"] = {"name": "base"} + base_config[CONF_SUBSTITUTIONS] = {"var1": "value1"} + + package_config = OrderedDict() + package_config["sensor"] = [{"platform": "template"}] + package_config[CONF_SUBSTITUTIONS] = {"var2": "value2"} + + # Merge configs (simulating package merge) + merged_config = merge_config(base_config, package_config) + + # Verify merged config is still an OrderedDict + assert isinstance(merged_config, OrderedDict), ( + "Merged config should be an OrderedDict" + ) + + # Now try to run substitution pass on the merged config + substitutions.do_substitution_pass(merged_config, None) + + # Should not raise AttributeError + assert isinstance(merged_config, OrderedDict), ( + "Config should still be OrderedDict after substitution pass" + ) + keys = list(merged_config.keys()) + assert keys[0] == CONF_SUBSTITUTIONS, "Substitutions should be first key" + + +def test_validate_config_with_command_line_substitutions_maintains_ordered_dict( + tmp_path, +) -> None: + """Test that validate_config preserves OrderedDict when merging command-line substitutions. 
+ + This tests the code path in config.py where result[CONF_SUBSTITUTIONS] is set + using merge_dicts_ordered() with command-line substitutions provided. + """ + # Create a minimal valid config + test_config = OrderedDict() + test_config["esphome"] = {"name": "test_device", "platform": "ESP32"} + test_config[CONF_SUBSTITUTIONS] = OrderedDict({"var1": "value1", "var2": "value2"}) + test_config["esp32"] = {"board": "esp32dev"} + + # Command line substitutions that should override + command_line_subs = {"var2": "override", "var3": "new_value"} + + # Set up CORE for the test with a proper Path object + test_yaml = tmp_path / "test.yaml" + test_yaml.write_text("# test config") + CORE.config_path = test_yaml + + # Call validate_config with command line substitutions + result = config_module.validate_config(test_config, command_line_subs) + + # Verify that result[CONF_SUBSTITUTIONS] is an OrderedDict + assert isinstance(result.get(CONF_SUBSTITUTIONS), OrderedDict), ( + "Result substitutions should be an OrderedDict" + ) + + # Verify substitutions were properly merged + assert result[CONF_SUBSTITUTIONS]["var1"] == "value1" + assert result[CONF_SUBSTITUTIONS]["var2"] == "override" + assert result[CONF_SUBSTITUTIONS]["var3"] == "new_value" + + +def test_validate_config_without_command_line_substitutions_maintains_ordered_dict( + tmp_path, +) -> None: + """Test that validate_config preserves OrderedDict without command-line substitutions. + + This tests the code path in config.py where result[CONF_SUBSTITUTIONS] is set + using merge_dicts_ordered() when command_line_substitutions is None. + """ + # Create a minimal valid config + test_config = OrderedDict() + test_config["esphome"] = {"name": "test_device", "platform": "ESP32"} + test_config[CONF_SUBSTITUTIONS] = OrderedDict({"var1": "value1", "var2": "value2"}) + test_config["esp32"] = {"board": "esp32dev"} + + # Set up CORE for the test with a proper Path object + test_yaml = tmp_path / "test.yaml" + test_yaml.write_text("# test config") + CORE.config_path = test_yaml + + # Call validate_config without command line substitutions + result = config_module.validate_config(test_config, None) + + # Verify that result[CONF_SUBSTITUTIONS] is an OrderedDict + assert isinstance(result.get(CONF_SUBSTITUTIONS), OrderedDict), ( + "Result substitutions should be an OrderedDict" + ) + + # Verify substitutions are unchanged + assert result[CONF_SUBSTITUTIONS]["var1"] == "value1" + assert result[CONF_SUBSTITUTIONS]["var2"] == "value2" + + +def test_merge_config_preserves_ordered_dict() -> None: + """Test that merge_config preserves OrderedDict type. + + This is a regression test to ensure merge_config doesn't lose OrderedDict type + when merging configs, which causes AttributeError on move_to_end(). 
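+
+ Expected type behavior, roughly (mirrored by the assertions below):
+
+ merge_config(OrderedDict(...), dict(...)) -> OrderedDict
+ merge_config(dict(...), OrderedDict(...)) -> OrderedDict
+ merge_config(dict(...), dict(...)) -> dict (no forced conversion)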
+ """ + # Test OrderedDict + dict = OrderedDict + od = OrderedDict([("a", 1), ("b", 2)]) + d = {"b": 20, "c": 3} + result = merge_config(od, d) + assert isinstance(result, OrderedDict), ( + "OrderedDict + dict should return OrderedDict" + ) + + # Test dict + OrderedDict = OrderedDict + d = {"a": 1, "b": 2} + od = OrderedDict([("b", 20), ("c", 3)]) + result = merge_config(d, od) + assert isinstance(result, OrderedDict), ( + "dict + OrderedDict should return OrderedDict" + ) + + # Test OrderedDict + OrderedDict = OrderedDict + od1 = OrderedDict([("a", 1), ("b", 2)]) + od2 = OrderedDict([("b", 20), ("c", 3)]) + result = merge_config(od1, od2) + assert isinstance(result, OrderedDict), ( + "OrderedDict + OrderedDict should return OrderedDict" + ) + + # Test that dict + dict still returns regular dict (no unnecessary conversion) + d1 = {"a": 1, "b": 2} + d2 = {"b": 20, "c": 3} + result = merge_config(d1, d2) + assert isinstance(result, dict), "dict + dict should return dict" + assert not isinstance(result, OrderedDict), ( + "dict + dict should not return OrderedDict" + ) From 14d76e9e4e3cc4d36ed2ba444bb03504edad573e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 17:36:03 -1000 Subject: [PATCH 10/31] [ci] Merge components with different buses to reduce CI time (#11251) Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com> --- .github/workflows/ci.yml | 62 ++- esphome/components/esp8266/__init__.py | 13 +- esphome/components/esp8266/iram_fix.py.script | 44 +++ script/analyze_component_buses.py | 142 +++++++ script/list-components.py | 39 +- script/merge_component_configs.py | 136 +++++-- script/split_components_for_ci.py | 95 ++++- script/test_build_components.py | 373 +++++++++++++----- .../components/ade7880/test.esp8266-ard.yaml | 2 +- tests/components/ade7953_i2c/common.yaml | 3 + tests/components/ade7953_spi/common.yaml | 6 +- tests/components/as3935_i2c/common.yaml | 3 + tests/components/as3935_spi/common.yaml | 3 + tests/components/axs15231/common.yaml | 4 +- tests/components/bme280_i2c/common.yaml | 6 +- tests/components/bme280_spi/common.yaml | 6 +- tests/components/bmp280_i2c/common.yaml | 4 +- tests/components/bmp280_spi/common.yaml | 4 +- tests/components/bmp3xx_i2c/common.yaml | 2 + tests/components/bmp3xx_spi/common.yaml | 2 + tests/components/camera/test.esp32-idf.yaml | 2 +- .../camera_encoder/test.esp32-idf.yaml | 2 +- tests/components/chsc6x/test.esp32-idf.yaml | 2 + tests/components/ektf2232/common.yaml | 4 +- tests/components/ens160_i2c/common.yaml | 3 + tests/components/ens160_spi/common.yaml | 3 + .../esp32_camera/test.esp32-idf.yaml | 2 +- .../test.esp32-idf.yaml | 2 +- tests/components/font/common.yaml | 1 + tests/components/ft63x6/test.esp8266-ard.yaml | 2 +- tests/components/graph/common.yaml | 1 + .../graphical_display_menu/common.yaml | 4 +- tests/components/gt911/common.yaml | 4 +- tests/components/hx711/test.esp8266-ard.yaml | 4 +- tests/components/ina2xx_i2c/common.yaml | 24 +- tests/components/ina2xx_spi/common.yaml | 24 +- tests/components/lilygo_t5_47/common.yaml | 4 +- tests/components/pn532_i2c/common.yaml | 4 +- tests/components/pn532_spi/common.yaml | 4 +- tests/components/pn7160_i2c/common.yaml | 18 +- tests/components/pn7160_spi/common.yaml | 18 +- tests/components/rc522_i2c/common.yaml | 4 +- tests/components/rc522_spi/common.yaml | 6 +- .../selec_meter/test.esp8266-ard.yaml | 2 +- .../sn74hc595/test.esp8266-ard.yaml | 4 +- tests/components/ssd1306_i2c/common.yaml | 2 +- tests/components/ssd1306_spi/common.yaml | 
1 + tests/components/ssd1327_i2c/common.yaml | 2 +- tests/components/ssd1327_spi/common.yaml | 1 + tests/components/st7567_i2c/common.yaml | 2 +- tests/components/st7567_spi/common.yaml | 1 + tests/components/syslog/common.yaml | 3 +- tests/components/tt21100/common.yaml | 4 +- .../components/wk2132_i2c/test.esp32-idf.yaml | 1 + .../wk2132_i2c/test.esp32-s3-idf.yaml | 1 + tests/components/wk2132_spi/common.yaml | 8 +- .../components/wk2132_spi/test.esp32-idf.yaml | 1 + .../wk2132_spi/test.esp32-s3-idf.yaml | 1 + .../components/wk2168_i2c/test.esp32-idf.yaml | 1 + .../wk2168_i2c/test.esp32-s3-idf.yaml | 1 + .../components/wk2168_spi/test.esp32-idf.yaml | 1 + .../wk2168_spi/test.esp32-s3-idf.yaml | 1 + .../components/wk2204_i2c/test.esp32-idf.yaml | 1 + .../wk2204_i2c/test.esp32-s3-idf.yaml | 1 + tests/components/wk2204_spi/common.yaml | 12 +- .../components/wk2204_spi/test.esp32-idf.yaml | 1 + .../wk2204_spi/test.esp32-s3-idf.yaml | 1 + .../components/wk2212_i2c/test.esp32-idf.yaml | 1 + .../wk2212_i2c/test.esp32-s3-idf.yaml | 1 + .../components/wk2212_spi/test.esp32-idf.yaml | 1 + .../wk2212_spi/test.esp32-s3-idf.yaml | 1 + .../build_components_base.esp32-idf.yaml | 6 +- .../{camera => i2c_camera}/esp32-idf.yaml | 11 +- .../common/uart_bridge_2/esp32-idf.yaml | 11 + .../common/uart_bridge_2/esp32-s3-idf.yaml | 11 + .../common/uart_bridge_4/esp32-idf.yaml | 11 + .../common/uart_bridge_4/esp32-s3-idf.yaml | 11 + .../partitions_testing.csv | 10 + 78 files changed, 954 insertions(+), 266 deletions(-) create mode 100644 esphome/components/esp8266/iram_fix.py.script rename tests/test_build_components/common/{camera => i2c_camera}/esp32-idf.yaml (83%) create mode 100644 tests/test_build_components/common/uart_bridge_2/esp32-idf.yaml create mode 100644 tests/test_build_components/common/uart_bridge_2/esp32-s3-idf.yaml create mode 100644 tests/test_build_components/common/uart_bridge_4/esp32-idf.yaml create mode 100644 tests/test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml create mode 100644 tests/test_build_components/partitions_testing.csv diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 163e9ab9ec..87e182fe4d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -379,7 +379,16 @@ jobs: # Use intelligent splitter that groups components with same bus configs components='${{ needs.determine-jobs.outputs.changed-components-with-tests }}' - directly_changed='${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}' + + # Only isolate directly changed components when targeting dev branch + # For beta/release branches, group everything for faster CI + if [[ "${{ github.base_ref }}" == beta* ]] || [[ "${{ github.base_ref }}" == release* ]]; then + directly_changed='[]' + echo "Target branch: ${{ github.base_ref }} - grouping all components" + else + directly_changed='${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}' + echo "Target branch: ${{ github.base_ref }} - isolating directly changed components" + fi echo "Splitting components intelligently..." 
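+ # The splitter emits a JSON list of batches, exposed as this job's "matrix"
+ # output and consumed by test-build-components below; each entry is one
+ # space-separated group of components, e.g. ["comp_a comp_b", "comp_c"]
+ # (illustrative names only).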
output=$(python3 script/split_components_for_ci.py --components "$components" --directly-changed "$directly_changed" --batch-size 40 --output github) @@ -396,7 +405,7 @@ jobs: if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0 strategy: fail-fast: false - max-parallel: ${{ (github.base_ref == 'beta' || github.base_ref == 'release') && 8 || 4 }} + max-parallel: ${{ (startsWith(github.base_ref, 'beta') || startsWith(github.base_ref, 'release')) && 8 || 4 }} matrix: components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }} steps: @@ -424,18 +433,31 @@ jobs: - name: Validate and compile components with intelligent grouping run: | . venv/bin/activate - # Use /mnt for build files (70GB available vs ~29GB on /) - # Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there) - sudo mkdir -p /mnt/platformio - sudo chown $USER:$USER /mnt/platformio - mkdir -p ~/.platformio - sudo mount --bind /mnt/platformio ~/.platformio - # Bind mount test build directory to /mnt - sudo mkdir -p /mnt/test_build_components_build - sudo chown $USER:$USER /mnt/test_build_components_build - mkdir -p tests/test_build_components/build - sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build + # Check if /mnt has more free space than / before bind mounting + # Extract available space in KB for comparison + root_avail=$(df -k / | awk 'NR==2 {print $4}') + mnt_avail=$(df -k /mnt 2>/dev/null | awk 'NR==2 {print $4}') + + echo "Available space: / has ${root_avail}KB, /mnt has ${mnt_avail}KB" + + # Only use /mnt if it has more space than / + if [ -n "$mnt_avail" ] && [ "$mnt_avail" -gt "$root_avail" ]; then + echo "Using /mnt for build files (more space available)" + # Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there) + sudo mkdir -p /mnt/platformio + sudo chown $USER:$USER /mnt/platformio + mkdir -p ~/.platformio + sudo mount --bind /mnt/platformio ~/.platformio + + # Bind mount test build directory to /mnt + sudo mkdir -p /mnt/test_build_components_build + sudo chown $USER:$USER /mnt/test_build_components_build + mkdir -p tests/test_build_components/build + sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build + else + echo "Using / for build files (more space available than /mnt or /mnt unavailable)" + fi # Convert space-separated components to comma-separated for Python script components_csv=$(echo "${{ matrix.components }}" | tr ' ' ',') @@ -448,7 +470,7 @@ jobs: # - This catches pin conflicts and other issues in directly changed code # - Grouped tests use --testing-mode to allow config merging (disables some checks) # - Dependencies are safe to group since they weren't modified in this PR - if [ "${{ github.base_ref }}" = "beta" ] || [ "${{ github.base_ref }}" = "release" ]; then + if [[ "${{ github.base_ref }}" == beta* ]] || [[ "${{ github.base_ref }}" == release* ]]; then directly_changed_csv="" echo "Testing components: $components_csv" echo "Target branch: ${{ github.base_ref }} - grouping all components" @@ -459,6 +481,11 @@ jobs: fi echo "" + # Show disk space before validation (after bind mounts setup) + echo "Disk space before config validation:" + df -h + echo "" + # Run config validation with grouping and isolation python3 script/test_build_components.py -e config -c "$components_csv" -f --isolate "$directly_changed_csv" @@ -466,6 +493,11 @@ jobs: echo "Config validation passed! Starting compilation..." 
echo "" + # Show disk space before compilation + echo "Disk space before compilation:" + df -h + echo "" + # Run compilation with grouping and isolation python3 script/test_build_components.py -e compile -c "$components_csv" -f --isolate "$directly_changed_csv" @@ -474,7 +506,7 @@ jobs: runs-on: ubuntu-latest needs: - common - if: github.event_name == 'pull_request' && github.base_ref != 'beta' && github.base_ref != 'release' + if: github.event_name == 'pull_request' && !startsWith(github.base_ref, 'beta') && !startsWith(github.base_ref, 'release') steps: - name: Check out code from GitHub uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 diff --git a/esphome/components/esp8266/__init__.py b/esphome/components/esp8266/__init__.py index 8a7fbbcb0a..9d8e6b7d1e 100644 --- a/esphome/components/esp8266/__init__.py +++ b/esphome/components/esp8266/__init__.py @@ -190,7 +190,7 @@ async def to_code(config): cg.add_define("ESPHOME_VARIANT", "ESP8266") cg.add_define(ThreadModel.SINGLE) - cg.add_platformio_option("extra_scripts", ["post:post_build.py"]) + cg.add_platformio_option("extra_scripts", ["pre:iram_fix.py", "post:post_build.py"]) conf = config[CONF_FRAMEWORK] cg.add_platformio_option("framework", "arduino") @@ -230,6 +230,12 @@ async def to_code(config): # For cases where nullptrs can be handled, use nothrow: `new (std::nothrow) T;` cg.add_build_flag("-DNEW_OOM_ABORT") + # In testing mode, fake a larger IRAM to allow linking grouped component tests + # Real ESP8266 hardware only has 32KB IRAM, but for CI testing we pretend it has 2MB + # This is done via a pre-build script that generates a custom linker script + if CORE.testing_mode: + cg.add_build_flag("-DESPHOME_TESTING_MODE") + cg.add_platformio_option("board_build.flash_mode", config[CONF_BOARD_FLASH_MODE]) ver: cv.Version = CORE.data[KEY_CORE][KEY_FRAMEWORK_VERSION] @@ -265,3 +271,8 @@ def copy_files(): post_build_file, CORE.relative_build_path("post_build.py"), ) + iram_fix_file = dir / "iram_fix.py.script" + copy_file_if_changed( + iram_fix_file, + CORE.relative_build_path("iram_fix.py"), + ) diff --git a/esphome/components/esp8266/iram_fix.py.script b/esphome/components/esp8266/iram_fix.py.script new file mode 100644 index 0000000000..96bddc2ced --- /dev/null +++ b/esphome/components/esp8266/iram_fix.py.script @@ -0,0 +1,44 @@ +import os +import re + +# pylint: disable=E0602 +Import("env") # noqa + + +def patch_linker_script_after_preprocess(source, target, env): + """Patch the local linker script after PlatformIO preprocesses it.""" + # Check if we're in testing mode by looking for the define + build_flags = env.get("BUILD_FLAGS", []) + testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) + + if not testing_mode: + return + + # Get the local linker script path + build_dir = env.subst("$BUILD_DIR") + local_ld = os.path.join(build_dir, "ld", "local.eagle.app.v6.common.ld") + + if not os.path.exists(local_ld): + return + + # Read the linker script + with open(local_ld, "r") as f: + content = f.read() + + # Replace IRAM size from 0x8000 (32KB) to 0x200000 (2MB) + # The line looks like: iram1_0_seg : org = 0x40100000, len = 0x8000 + updated = re.sub( + r"(iram1_0_seg\s*:\s*org\s*=\s*0x40100000\s*,\s*len\s*=\s*)0x8000", + r"\g<1>0x200000", + content, + ) + + if updated != content: + with open(local_ld, "w") as f: + f.write(updated) + print("ESPHome: Patched IRAM size to 2MB for testing mode") + + +# Hook into the build process right before linking +# This runs after PlatformIO has already 
preprocessed the linker scripts +env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_linker_script_after_preprocess) diff --git a/script/analyze_component_buses.py b/script/analyze_component_buses.py index 24854178a0..fc7d021cbe 100755 --- a/script/analyze_component_buses.py +++ b/script/analyze_component_buses.py @@ -56,6 +56,10 @@ DIRECT_BUS_TYPES = ("i2c", "spi", "uart", "modbus") # These components can be merged with any other group NO_BUSES_SIGNATURE = "no_buses" +# Prefix for isolated component signatures +# Isolated components have unique signatures and cannot be merged with others +ISOLATED_SIGNATURE_PREFIX = "isolated_" + # Base bus components - these ARE the bus implementations and should not # be flagged as needing migration since they are the platform/base components BASE_BUS_COMPONENTS = { @@ -75,6 +79,7 @@ ISOLATED_COMPONENTS = { "ethernet": "Defines ethernet: which conflicts with wifi: used by most components", "ethernet_info": "Related to ethernet component which conflicts with wifi", "lvgl": "Defines multiple SDL displays on host platform that conflict when merged with other display configs", + "mapping": "Uses dict format for image/display sections incompatible with standard list format - ESPHome merge_config cannot handle", "openthread": "Conflicts with wifi: used by most components", "openthread_info": "Conflicts with wifi: used by most components", "matrix_keypad": "Needs isolation due to keypad", @@ -368,6 +373,143 @@ def analyze_all_components( return components, non_groupable, direct_bus_components +@lru_cache(maxsize=256) +def _get_bus_configs(buses: tuple[str, ...]) -> frozenset[tuple[str, str]]: + """Map bus type to set of configs for that type. + + Args: + buses: Tuple of bus package names (e.g., ("uart_9600", "i2c")) + + Returns: + Frozenset of (base_type, full_config) tuples + Example: frozenset({("uart", "uart_9600"), ("i2c", "i2c")}) + """ + # Split on underscore to get base type: "uart_9600" -> "uart", "i2c" -> "i2c" + return frozenset((bus.split("_", 1)[0], bus) for bus in buses) + + +@lru_cache(maxsize=1024) +def are_buses_compatible(buses1: tuple[str, ...], buses2: tuple[str, ...]) -> bool: + """Check if two bus tuples are compatible for merging. + + Two bus lists are compatible if they don't have conflicting configurations + for the same bus type. 
For example: + - ("ble", "uart") and ("i2c",) are compatible (different buses) + - ("uart_9600",) and ("uart_19200",) are NOT compatible (same bus, different configs) + - ("uart_9600",) and ("uart_9600",) are compatible (same bus, same config) + + Args: + buses1: First tuple of bus package names + buses2: Second tuple of bus package names + + Returns: + True if buses can be merged without conflicts + """ + configs1 = _get_bus_configs(buses1) + configs2 = _get_bus_configs(buses2) + + # Group configs by base type + bus_types1: dict[str, set[str]] = {} + for base_type, full_config in configs1: + if base_type not in bus_types1: + bus_types1[base_type] = set() + bus_types1[base_type].add(full_config) + + bus_types2: dict[str, set[str]] = {} + for base_type, full_config in configs2: + if base_type not in bus_types2: + bus_types2[base_type] = set() + bus_types2[base_type].add(full_config) + + # Check for conflicts: same bus type with different configs + for bus_type, configs in bus_types1.items(): + if bus_type not in bus_types2: + continue # No conflict - different bus types + # Same bus type - check if configs match + if configs != bus_types2[bus_type]: + return False # Conflict - same bus type, different configs + + return True # No conflicts found + + +def merge_compatible_bus_groups( + grouped_components: dict[tuple[str, str], list[str]], +) -> dict[tuple[str, str], list[str]]: + """Merge groups with compatible (non-conflicting) buses. + + This function takes groups keyed by (platform, bus_signature) and merges + groups that share the same platform and have compatible bus configurations. + Two groups can be merged if their buses don't conflict - meaning they don't + have different configurations for the same bus type. + + For example: + - ["ble"] + ["uart"] = compatible (different buses) + - ["uart_9600"] + ["uart_19200"] = incompatible (same bus, different configs) + - ["uart_9600"] + ["uart_9600"] = compatible (same bus, same config) + + Args: + grouped_components: Dictionary mapping (platform, signature) to list of component names + + Returns: + Dictionary with same structure but with compatible groups merged + """ + merged_groups: dict[tuple[str, str], list[str]] = {} + processed_keys: set[tuple[str, str]] = set() + + for (platform1, sig1), comps1 in sorted(grouped_components.items()): + if (platform1, sig1) in processed_keys: + continue + + # Skip NO_BUSES_SIGNATURE - kept separate for flexible batch distribution + # These components have no bus requirements and can be added to any batch + # as "fillers" for load balancing across CI runners + if sig1 == NO_BUSES_SIGNATURE: + merged_groups[(platform1, sig1)] = comps1 + processed_keys.add((platform1, sig1)) + continue + + # Skip isolated components - they can't be merged with others + if sig1.startswith(ISOLATED_SIGNATURE_PREFIX): + merged_groups[(platform1, sig1)] = comps1 + processed_keys.add((platform1, sig1)) + continue + + # Start with this group's components + merged_comps: list[str] = list(comps1) + merged_sig: str = sig1 + processed_keys.add((platform1, sig1)) + + # Get buses for this group as tuple for caching + buses1: tuple[str, ...] 
= tuple(sorted(sig1.split("+"))) + + # Try to merge with other groups on same platform + for (platform2, sig2), comps2 in sorted(grouped_components.items()): + if (platform2, sig2) in processed_keys: + continue + if platform2 != platform1: + continue # Different platforms can't be merged + if sig2 == NO_BUSES_SIGNATURE: + continue # Keep separate for flexible batch distribution + if sig2.startswith(ISOLATED_SIGNATURE_PREFIX): + continue # Isolated components can't be merged + + # Check if buses are compatible + buses2: tuple[str, ...] = tuple(sorted(sig2.split("+"))) + if are_buses_compatible(buses1, buses2): + # Compatible! Merge this group + merged_comps.extend(comps2) + processed_keys.add((platform2, sig2)) + # Update merged signature to include all unique buses + all_buses: set[str] = set(buses1) | set(buses2) + merged_sig = "+".join(sorted(all_buses)) + buses1 = tuple(sorted(all_buses)) # Update for next iteration + + # Store merged group + merged_groups[(platform1, merged_sig)] = merged_comps + + return merged_groups + + def create_grouping_signature( platform_buses: dict[str, list[str]], platform: str ) -> str: diff --git a/script/list-components.py b/script/list-components.py index dffff6801a..9abb2bc345 100755 --- a/script/list-components.py +++ b/script/list-components.py @@ -185,17 +185,20 @@ def main(): "-c", "--changed", action="store_true", - help="List all components required for testing based on changes (includes dependencies)", + help="List all components with dependencies (used by clang-tidy). " + "When base test infrastructure changes, returns ALL components.", ) parser.add_argument( "--changed-direct", action="store_true", - help="List only directly changed components (without dependencies)", + help="List only directly changed components, ignoring infrastructure changes " + "(used by CI for isolation decisions)", ) parser.add_argument( "--changed-with-deps", action="store_true", - help="Output JSON with both directly changed and all changed components", + help="Output JSON with both directly changed and all changed components " + "(with dependencies), ignoring infrastructure changes (used by CI for test determination)", ) parser.add_argument( "-b", "--branch", help="Branch to compare changed files against" @@ -213,12 +216,34 @@ def main(): # When --changed* is passed, only get the changed files changed = changed_files(args.branch) - # If any base test file(s) changed, there's no need to filter out components - if any("tests/test_build_components" in file for file in changed): - # Need to get all component files + # If any base test file(s) changed, we need to check all components + # BUT only for --changed (used by clang-tidy for comprehensive checking) + # NOT for --changed-direct or --changed-with-deps (used by CI for targeted testing) + # + # Flag usage: + # - --changed: Used by clang-tidy (script/helpers.py get_changed_components) + # Returns: All components with dependencies when base test files change + # Reason: Test infrastructure changes may affect any component + # + # - --changed-direct: Used by CI isolation (script/determine-jobs.py) + # Returns: Only components with actual code changes (not infrastructure) + # Reason: Only directly changed components need isolated testing + # + # - --changed-with-deps: Used by CI test determination (script/determine-jobs.py) + # Returns: Components with code changes + their dependencies (not infrastructure) + # Reason: CI needs to test changed components and their dependents + base_test_changed = any( + 
"tests/test_build_components" in file for file in changed + ) + + if base_test_changed and not args.changed_direct and not args.changed_with_deps: + # Base test infrastructure changed - load all component files + # This is for --changed (clang-tidy) which needs comprehensive checking files = get_all_component_files() else: - # Only look at changed component files + # Only look at changed component files (ignore infrastructure changes) + # For --changed-direct: only actual component code changes matter (for isolation) + # For --changed-with-deps: only actual component code changes matter (for testing) files = [f for f in changed if filter_component_files(f)] else: # Get all component files diff --git a/script/merge_component_configs.py b/script/merge_component_configs.py index a19b65038c..59774edba9 100755 --- a/script/merge_component_configs.py +++ b/script/merge_component_configs.py @@ -16,6 +16,7 @@ The merger handles: from __future__ import annotations import argparse +from functools import lru_cache from pathlib import Path import re import sys @@ -28,6 +29,10 @@ from esphome import yaml_util from esphome.config_helpers import merge_config from script.analyze_component_buses import PACKAGE_DEPENDENCIES, get_common_bus_packages +# Prefix for dependency markers in package tracking +# Used to mark packages that are included transitively (e.g., uart via modbus) +DEPENDENCY_MARKER_PREFIX = "_dep_" + def load_yaml_file(yaml_file: Path) -> dict: """Load YAML file using ESPHome's YAML loader. @@ -44,6 +49,34 @@ def load_yaml_file(yaml_file: Path) -> dict: return yaml_util.load_yaml(yaml_file) +@lru_cache(maxsize=256) +def get_component_packages( + component_name: str, platform: str, tests_dir_str: str +) -> dict: + """Get packages dict from a component's test file with caching. + + This function is cached to avoid re-loading and re-parsing the same file + multiple times when extracting packages during cross-bus merging. + + Args: + component_name: Name of the component + platform: Platform name (e.g., "esp32-idf") + tests_dir_str: String path to tests/components directory (must be string for cache hashability) + + Returns: + Dictionary with 'packages' key containing the raw packages dict from the YAML, + or empty dict if no packages section exists + """ + tests_dir = Path(tests_dir_str) + test_file = tests_dir / component_name / f"test.{platform}.yaml" + comp_data = load_yaml_file(test_file) + + if "packages" not in comp_data or not isinstance(comp_data["packages"], dict): + return {} + + return comp_data["packages"] + + def extract_packages_from_yaml(data: dict) -> dict[str, str]: """Extract COMMON BUS package includes from parsed YAML. 
@@ -82,7 +115,7 @@ def extract_packages_from_yaml(data: dict) -> dict[str, str]: if dep not in common_bus_packages: continue # Mark as included via dependency - packages[f"_dep_{dep}"] = f"(included via {name})" + packages[f"{DEPENDENCY_MARKER_PREFIX}{dep}"] = f"(included via {name})" return packages @@ -195,6 +228,9 @@ def merge_component_configs( # Start with empty config merged_config_data = {} + # Convert tests_dir to string for caching + tests_dir_str = str(tests_dir) + # Process each component for comp_name in component_names: comp_dir = tests_dir / comp_name @@ -206,26 +242,29 @@ def merge_component_configs( # Load the component's test file comp_data = load_yaml_file(test_file) - # Validate packages are compatible - # Components with no packages (no_buses) can merge with any group + # Merge packages from all components (cross-bus merging) + # Components can have different packages (e.g., one with ble, another with uart) + # as long as they don't conflict (checked by are_buses_compatible before calling this) comp_packages = extract_packages_from_yaml(comp_data) if all_packages is None: - # First component - set the baseline - all_packages = comp_packages - elif not comp_packages: - # This component has no packages (no_buses) - it can merge with any group - pass - elif not all_packages: - # Previous components had no packages, but this one does - adopt these packages - all_packages = comp_packages - elif comp_packages != all_packages: - # Both have packages but they differ - this is an error - raise ValueError( - f"Component {comp_name} has different packages than previous components. " - f"Expected: {all_packages}, Got: {comp_packages}. " - f"All components must use the same common bus configs to be merged." - ) + # First component - initialize package dict + all_packages = comp_packages if comp_packages else {} + elif comp_packages: + # Merge packages - combine all unique package types + # If both have the same package type, verify they're identical + for pkg_name, pkg_config in comp_packages.items(): + if pkg_name in all_packages: + # Same package type - verify config matches + if all_packages[pkg_name] != pkg_config: + raise ValueError( + f"Component {comp_name} has conflicting config for package '{pkg_name}'. " + f"Expected: {all_packages[pkg_name]}, Got: {pkg_config}. " + f"Components with conflicting bus configs cannot be merged." + ) + else: + # New package type - add it + all_packages[pkg_name] = pkg_config # Handle $component_dir by replacing with absolute path # This allows components that use local file references to be grouped @@ -287,26 +326,51 @@ def merge_component_configs( # merge_config handles list merging with ID-based deduplication automatically merged_config_data = merge_config(merged_config_data, comp_data) - # Add packages back (only once, since they're identical) - # IMPORTANT: Only re-add common bus packages (spi, i2c, uart, etc.) + # Add merged packages back (union of all component packages) + # IMPORTANT: Only include common bus packages (spi, i2c, uart, etc.) 
# Do NOT re-add component-specific packages as they contain unprefixed $component_dir refs if all_packages: - first_comp_data = load_yaml_file( - tests_dir / component_names[0] / f"test.{platform}.yaml" - ) - if "packages" in first_comp_data and isinstance( - first_comp_data["packages"], dict - ): - # Filter to only include common bus packages - # Only dict format can contain common bus packages - common_bus_packages = get_common_bus_packages() - filtered_packages = { - name: value - for name, value in first_comp_data["packages"].items() - if name in common_bus_packages - } - if filtered_packages: - merged_config_data["packages"] = filtered_packages + # Build packages dict from merged all_packages + # all_packages is a dict mapping package_name -> str(package_value) + # We need to reconstruct the actual package values by loading them from any component + # Since packages with the same name must have identical configs (verified above), + # we can load the package value from the first component that has each package + common_bus_packages = get_common_bus_packages() + merged_packages: dict[str, Any] = {} + + # Collect packages that are included as dependencies + # If modbus is present, uart is included via modbus.packages.uart + packages_to_skip: set[str] = set() + for pkg_name in all_packages: + if pkg_name.startswith(DEPENDENCY_MARKER_PREFIX): + # Extract the actual package name (remove _dep_ prefix) + dep_name = pkg_name[len(DEPENDENCY_MARKER_PREFIX) :] + packages_to_skip.add(dep_name) + + for pkg_name in all_packages: + # Skip dependency markers + if pkg_name.startswith(DEPENDENCY_MARKER_PREFIX): + continue + # Skip non-common-bus packages + if pkg_name not in common_bus_packages: + continue + # Skip packages that are included as dependencies of other packages + # This prevents duplicate definitions (e.g., uart via modbus + uart separately) + if pkg_name in packages_to_skip: + continue + + # Find a component that has this package and extract its value + # Uses cached lookup to avoid re-loading the same files + for comp_name in component_names: + comp_packages = get_component_packages( + comp_name, platform, tests_dir_str + ) + if pkg_name in comp_packages: + merged_packages[pkg_name] = comp_packages[pkg_name] + break + + if merged_packages: + merged_config_data["packages"] = merged_packages # Deduplicate items with same ID (keeps first occurrence) merged_config_data = deduplicate_by_id(merged_config_data) diff --git a/script/split_components_for_ci.py b/script/split_components_for_ci.py index 9730db4988..dff46d3619 100755 --- a/script/split_components_for_ci.py +++ b/script/split_components_for_ci.py @@ -22,9 +22,11 @@ sys.path.insert(0, str(Path(__file__).parent.parent)) from script.analyze_component_buses import ( ISOLATED_COMPONENTS, + ISOLATED_SIGNATURE_PREFIX, NO_BUSES_SIGNATURE, analyze_all_components, create_grouping_signature, + merge_compatible_bus_groups, ) # Weighting for batch creation @@ -33,6 +35,10 @@ from script.analyze_component_buses import ( ISOLATED_WEIGHT = 10 GROUPABLE_WEIGHT = 1 +# Platform used for batching (platform-agnostic batching) +# Batches are split across CI runners and each runner tests all platforms +ALL_PLATFORMS = "all" + def has_test_files(component_name: str, tests_dir: Path) -> bool: """Check if a component has test files. 
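The net effect of the cross-bus optimization on batching is easiest to see with a small example. A sketch of how merge_compatible_bus_groups transforms the (platform, signature) map (the import matches the diff above; the component names and group contents are made up):

    from script.analyze_component_buses import merge_compatible_bus_groups

    groups = {
        ("all", "i2c"): ["comp_a"],
        ("all", "uart_19200"): ["comp_b"],
        ("all", "uart_9600"): ["comp_c"],
        ("all", "no_buses"): ["comp_d"],
    }
    merged = merge_compatible_bus_groups(groups)
    # comp_a and comp_b merge into one group because i2c and uart_19200 do not
    # conflict; comp_c stays alone because uart_9600 conflicts with uart_19200;
    # no_buses components are kept separate for use as batch fillers:
    # {("all", "i2c+uart_19200"): ["comp_a", "comp_b"],
    #  ("all", "uart_9600"): ["comp_c"],
    #  ("all", "no_buses"): ["comp_d"]}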
@@ -57,7 +63,7 @@ def create_intelligent_batches( tests_dir: Path, batch_size: int = 40, directly_changed: set[str] | None = None, -) -> list[list[str]]: +) -> tuple[list[list[str]], dict[tuple[str, str], list[str]]]: """Create batches optimized for component grouping. Args: @@ -67,7 +73,9 @@ def create_intelligent_batches( directly_changed: Set of directly changed components (for logging only) Returns: - List of component batches (lists of component names) + Tuple of (batches, signature_groups) where: + - batches: List of component batches (lists of component names) + - signature_groups: Dict mapping (platform, signature) to component lists """ # Filter out components without test files # Platform components like 'climate' and 'climate_ir' don't have test files @@ -91,8 +99,9 @@ def create_intelligent_batches( # Group components by their bus signature ONLY (ignore platform) # All platforms will be tested by test_build_components.py for each batch - # Key: signature, Value: list of components - signature_groups: dict[str, list[str]] = defaultdict(list) + # Key: (platform, signature), Value: list of components + # We use ALL_PLATFORMS since batching is platform-agnostic + signature_groups: dict[tuple[str, str], list[str]] = defaultdict(list) for component in components_with_tests: # Components that can't be grouped get unique signatures @@ -107,7 +116,9 @@ def create_intelligent_batches( or (directly_changed and component in directly_changed) ) if is_isolated: - signature_groups[f"isolated_{component}"].append(component) + signature_groups[ + (ALL_PLATFORMS, f"{ISOLATED_SIGNATURE_PREFIX}{component}") + ].append(component) continue # Get signature from any platform (they should all have the same buses) @@ -117,11 +128,17 @@ def create_intelligent_batches( if buses: signature = create_grouping_signature({platform: buses}, platform) # Group by signature only - platform doesn't matter for batching - signature_groups[signature].append(component) + # Use ALL_PLATFORMS since we're batching across all platforms + signature_groups[(ALL_PLATFORMS, signature)].append(component) break # Only use first platform for grouping else: # No buses found for any platform - can be grouped together - signature_groups[NO_BUSES_SIGNATURE].append(component) + signature_groups[(ALL_PLATFORMS, NO_BUSES_SIGNATURE)].append(component) + + # Merge compatible bus groups (cross-bus optimization) + # This allows components with different buses (ble + uart) to be batched together + # improving the efficiency of test_build_components.py grouping + signature_groups = merge_compatible_bus_groups(signature_groups) # Create batches by keeping signature groups together # Components with the same signature stay in the same batches @@ -132,8 +149,8 @@ def create_intelligent_batches( # 2. Sort groupable signatures by size (largest first) # 3. 
"no_buses" components CAN be grouped together def sort_key(item): - signature, components = item - is_isolated = signature.startswith("isolated_") + (_platform, signature), components = item + is_isolated = signature.startswith(ISOLATED_SIGNATURE_PREFIX) # Put "isolated_*" last (1), groupable first (0) # Within each category, sort by size (largest first) return (is_isolated, -len(components)) @@ -149,8 +166,8 @@ def create_intelligent_batches( current_batch = [] current_weight = 0 - for signature, group_components in sorted_groups: - is_isolated = signature.startswith("isolated_") + for (_platform, signature), group_components in sorted_groups: + is_isolated = signature.startswith(ISOLATED_SIGNATURE_PREFIX) weight_per_component = ISOLATED_WEIGHT if is_isolated else GROUPABLE_WEIGHT for component in group_components: @@ -169,7 +186,7 @@ def create_intelligent_batches( if current_batch: batches.append(current_batch) - return batches + return batches, signature_groups def main() -> int: @@ -231,7 +248,7 @@ def main() -> int: return 1 # Create intelligent batches - batches = create_intelligent_batches( + batches, signature_groups = create_intelligent_batches( components=components, tests_dir=args.tests_dir, batch_size=args.batch_size, @@ -256,6 +273,58 @@ def main() -> int: # Re-analyze to get isolated component counts for summary _, non_groupable, _ = analyze_all_components(args.tests_dir) + # Show grouping details + print("\n=== Component Grouping Details ===", file=sys.stderr) + # Sort groups by signature for readability + groupable_groups = [] + isolated_groups = [] + for (platform, signature), group_comps in sorted(signature_groups.items()): + if signature.startswith(ISOLATED_SIGNATURE_PREFIX): + isolated_groups.append((signature, group_comps)) + else: + groupable_groups.append((signature, group_comps)) + + if groupable_groups: + print( + f"\nGroupable signatures ({len(groupable_groups)} merged groups after cross-bus optimization):", + file=sys.stderr, + ) + for signature, group_comps in sorted( + groupable_groups, key=lambda x: (-len(x[1]), x[0]) + ): + # Check if this is a merged signature (contains +) + is_merged = "+" in signature and signature != NO_BUSES_SIGNATURE + # Special handling for no_buses components + if signature == NO_BUSES_SIGNATURE: + print( + f" [{signature}]: {len(group_comps)} components (used as fillers across batches)", + file=sys.stderr, + ) + else: + merge_indicator = " [MERGED]" if is_merged else "" + print( + f" [{signature}]{merge_indicator}: {len(group_comps)} components", + file=sys.stderr, + ) + # Show first few components as examples + examples = ", ".join(sorted(group_comps)[:8]) + if len(group_comps) > 8: + examples += f", ... 
(+{len(group_comps) - 8} more)" + print(f" → {examples}", file=sys.stderr) + + if isolated_groups: + print( + f"\nIsolated components ({len(isolated_groups)} components - tested individually):", + file=sys.stderr, + ) + isolated_names = sorted( + [comp for _, comps in isolated_groups for comp in comps] + ) + # Group isolated components for compact display + for i in range(0, len(isolated_names), 10): + chunk = isolated_names[i : i + 10] + print(f" {', '.join(chunk)}", file=sys.stderr) + # Count isolated vs groupable components all_batched_components = [comp for batch in batches for comp in batch] isolated_count = sum( diff --git a/script/test_build_components.py b/script/test_build_components.py index 14fc10977c..c98d425447 100755 --- a/script/test_build_components.py +++ b/script/test_build_components.py @@ -17,11 +17,13 @@ from __future__ import annotations import argparse from collections import defaultdict +from dataclasses import dataclass import hashlib import os from pathlib import Path import subprocess import sys +import time # Add esphome to path sys.path.insert(0, str(Path(__file__).parent.parent)) @@ -34,32 +36,49 @@ from script.analyze_component_buses import ( analyze_all_components, create_grouping_signature, is_platform_component, + merge_compatible_bus_groups, uses_local_file_references, ) from script.merge_component_configs import merge_component_configs -# Platform-specific maximum group sizes -# ESP8266 has limited IRAM and can't handle large component groups -PLATFORM_MAX_GROUP_SIZE = { - "esp8266-ard": 10, # ESP8266 Arduino has limited IRAM - "esp8266-idf": 10, # ESP8266 IDF also has limited IRAM - # BK72xx now uses BK7252 board (1.62MB flash vs 1.03MB) - no limit needed - # Other platforms can handle larger groups -} + +@dataclass +class TestResult: + """Store information about a single test run.""" + + test_id: str + components: list[str] + platform: str + success: bool + duration: float + command: str = "" + test_type: str = "compile" # "config" or "compile" def show_disk_space_if_ci(esphome_command: str) -> None: """Show disk space usage if running in CI during compile. + Only shows output during compilation (not config validation) since + disk space is only relevant when actually building firmware. + Args: esphome_command: The esphome command being run (config/compile/clean) """ - if os.environ.get("GITHUB_ACTIONS") and esphome_command == "compile": - print("\n" + "=" * 80) - print("Disk Space After Build:") - print("=" * 80) - subprocess.run(["df", "-h"], check=False) - print("=" * 80 + "\n") + # Only show disk space during compilation in CI + # Config validation doesn't build anything so disk space isn't relevant + if not os.environ.get("GITHUB_ACTIONS"): + return + if esphome_command != "compile": + return + + print("\n" + "=" * 80) + print("Disk Space After Build:") + print("=" * 80) + # Use sys.stdout.flush() to ensure output appears immediately + sys.stdout.flush() + subprocess.run(["df", "-h"], check=False, stdout=sys.stdout, stderr=sys.stderr) + print("=" * 80 + "\n") + sys.stdout.flush() def find_component_tests( @@ -128,6 +147,140 @@ def get_platform_base_files(base_dir: Path) -> dict[str, list[Path]]: return dict(platform_files) +def group_components_by_platform( + failed_results: list[TestResult], +) -> dict[tuple[str, str], list[str]]: + """Group failed components by platform and test type for simplified reproduction commands. 
+ + Args: + failed_results: List of failed test results + + Returns: + Dictionary mapping (platform, test_type) to list of component names + """ + platform_components: dict[tuple[str, str], list[str]] = {} + for result in failed_results: + key = (result.platform, result.test_type) + if key not in platform_components: + platform_components[key] = [] + platform_components[key].extend(result.components) + + # Remove duplicates and sort for each platform + return { + key: sorted(set(components)) for key, components in platform_components.items() + } + + +def format_github_summary(test_results: list[TestResult]) -> str: + """Format test results as GitHub Actions job summary markdown. + + Args: + test_results: List of all test results + + Returns: + Markdown formatted summary string + """ + # Separate results into passed and failed + passed_results = [r for r in test_results if r.success] + failed_results = [r for r in test_results if not r.success] + + lines = [] + + # Header with emoji based on success/failure + if failed_results: + lines.append("## :x: Component Tests Failed\n") + else: + lines.append("## :white_check_mark: Component Tests Passed\n") + + # Summary statistics + total_time = sum(r.duration for r in test_results) + # Determine test type from results (all should be the same) + test_type = test_results[0].test_type if test_results else "unknown" + lines.append( + f"**Results:** {len(passed_results)} passed, {len(failed_results)} failed\n" + ) + lines.append(f"**Total time:** {total_time:.1f}s\n") + lines.append(f"**Test type:** `{test_type}`\n") + + # Show failed tests if any + if failed_results: + lines.append("### Failed Tests\n") + lines.append("| Test | Components | Platform | Duration |\n") + lines.append("|------|-----------|----------|----------|\n") + for result in failed_results: + components_str = ", ".join(result.components) + lines.append( + f"| `{result.test_id}` | {components_str} | {result.platform} | {result.duration:.1f}s |\n" + ) + lines.append("\n") + + # Show simplified commands to reproduce failures + # Group all failed components by platform for a single command per platform + lines.append("
\n") + lines.append("Commands to reproduce failures\n\n") + lines.append("```bash\n") + + # Generate one command per platform and test type + platform_components = group_components_by_platform(failed_results) + for platform, test_type in sorted(platform_components.keys()): + components_csv = ",".join(platform_components[(platform, test_type)]) + lines.append( + f"script/test_build_components.py -c {components_csv} -t {platform} -e {test_type}\n" + ) + + lines.append("```\n") + lines.append("
\n") + + # Show passed tests + if passed_results: + lines.append("### Passed Tests\n\n") + lines.append(f"{len(passed_results)} tests passed successfully\n") + + # Separate grouped and individual tests + grouped_results = [r for r in passed_results if len(r.components) > 1] + individual_results = [r for r in passed_results if len(r.components) == 1] + + if grouped_results: + lines.append("#### Grouped Tests\n") + lines.append("| Components | Platform | Count | Duration |\n") + lines.append("|-----------|----------|-------|----------|\n") + for result in grouped_results: + components_str = ", ".join(result.components) + lines.append( + f"| {components_str} | {result.platform} | {len(result.components)} | {result.duration:.1f}s |\n" + ) + lines.append("\n") + + if individual_results: + lines.append("#### Individual Tests\n") + # Show first 10 individual tests with timing + if len(individual_results) <= 10: + lines.extend( + f"- `{result.test_id}` - {result.duration:.1f}s\n" + for result in individual_results + ) + else: + lines.extend( + f"- `{result.test_id}` - {result.duration:.1f}s\n" + for result in individual_results[:10] + ) + lines.append(f"\n...and {len(individual_results) - 10} more\n") + lines.append("\n") + + return "".join(lines) + + +def write_github_summary(test_results: list[TestResult]) -> None: + """Write GitHub Actions job summary with test results and timing. + + Args: + test_results: List of all test results + """ + summary_content = format_github_summary(test_results) + with open(os.environ["GITHUB_STEP_SUMMARY"], "a", encoding="utf-8") as f: + f.write(summary_content) + + def extract_platform_with_version(base_file: Path) -> str: """Extract platform with version from base filename. @@ -151,7 +304,7 @@ def run_esphome_test( esphome_command: str, continue_on_fail: bool, use_testing_mode: bool = False, -) -> tuple[bool, str]: +) -> TestResult: """Run esphome test for a single component. Args: @@ -166,7 +319,7 @@ def run_esphome_test( use_testing_mode: Whether to use --testing-mode flag Returns: - Tuple of (success status, command string) + TestResult object with test details and timing """ test_name = test_file.stem.split(".")[0] @@ -221,9 +374,13 @@ def run_esphome_test( if use_testing_mode: print(" (using --testing-mode)") + start_time = time.time() + test_id = f"{component}.{test_name}.{platform_with_version}" + try: result = subprocess.run(cmd, check=False) success = result.returncode == 0 + duration = time.time() - start_time # Show disk space after build in CI during compile show_disk_space_if_ci(esphome_command) @@ -236,12 +393,30 @@ def run_esphome_test( print(cmd_str) print() raise subprocess.CalledProcessError(result.returncode, cmd) - return success, cmd_str + + return TestResult( + test_id=test_id, + components=[component], + platform=platform_with_version, + success=success, + duration=duration, + command=cmd_str, + test_type=esphome_command, + ) except subprocess.CalledProcessError: + duration = time.time() - start_time # Re-raise if we're not continuing on fail if not continue_on_fail: raise - return False, cmd_str + return TestResult( + test_id=test_id, + components=[component], + platform=platform_with_version, + success=False, + duration=duration, + command=cmd_str, + test_type=esphome_command, + ) def run_grouped_test( @@ -253,7 +428,7 @@ def run_grouped_test( tests_dir: Path, esphome_command: str, continue_on_fail: bool, -) -> tuple[bool, str]: +) -> TestResult: """Run esphome test for a group of components with shared bus configs. 
Args: @@ -267,7 +442,7 @@ def run_grouped_test( continue_on_fail: Whether to continue on failure Returns: - Tuple of (success status, command string) + TestResult object with test details and timing """ # Create merged config group_name = "_".join(components[:3]) # Use first 3 components for name @@ -294,8 +469,17 @@ def run_grouped_test( print(f"Error merging configs for {components}: {e}") if not continue_on_fail: raise - # Return empty command string since we failed before building the command - return False, f"# Failed during config merge: {e}" + # Return TestResult for merge failure + test_id = f"GROUPED[{','.join(components)}].{platform_with_version}" + return TestResult( + test_id=test_id, + components=components, + platform=platform_with_version, + success=False, + duration=0.0, + command=f"# Failed during config merge: {e}", + test_type=esphome_command, + ) # Create test file that includes merged config output_file = build_dir / f"test_{group_name}.{platform_with_version}.yaml" @@ -334,9 +518,13 @@ def run_grouped_test( print(f"> [GROUPED: {components_str}] [{platform_with_version}]") print(" (using --testing-mode)") + start_time = time.time() + test_id = f"GROUPED[{','.join(components)}].{platform_with_version}" + try: result = subprocess.run(cmd, check=False) success = result.returncode == 0 + duration = time.time() - start_time # Show disk space after build in CI during compile show_disk_space_if_ci(esphome_command) @@ -349,12 +537,30 @@ def run_grouped_test( print(cmd_str) print() raise subprocess.CalledProcessError(result.returncode, cmd) - return success, cmd_str + + return TestResult( + test_id=test_id, + components=components, + platform=platform_with_version, + success=success, + duration=duration, + command=cmd_str, + test_type=esphome_command, + ) except subprocess.CalledProcessError: + duration = time.time() - start_time # Re-raise if we're not continuing on fail if not continue_on_fail: raise - return False, cmd_str + return TestResult( + test_id=test_id, + components=components, + platform=platform_with_version, + success=False, + duration=duration, + command=cmd_str, + test_type=esphome_command, + ) def run_grouped_component_tests( @@ -366,7 +572,7 @@ def run_grouped_component_tests( esphome_command: str, continue_on_fail: bool, additional_isolated: set[str] | None = None, -) -> tuple[set[tuple[str, str]], list[str], list[str], dict[str, str]]: +) -> tuple[set[tuple[str, str]], list[TestResult]]: """Run grouped component tests. 
Args: @@ -380,12 +586,10 @@ def run_grouped_component_tests( additional_isolated: Additional components to treat as isolated (not grouped) Returns: - Tuple of (tested_components, passed_tests, failed_tests, failed_commands) + Tuple of (tested_components, test_results) """ tested_components = set() - passed_tests = [] - failed_tests = [] - failed_commands = {} # Map test_id to command string + test_results = [] # Group components by platform and bus signature grouped_components: dict[tuple[str, str], list[str]] = defaultdict(list) @@ -462,6 +666,11 @@ def run_grouped_component_tests( if signature: grouped_components[(platform, signature)].append(component) + # Merge groups with compatible buses (cross-bus grouping optimization) + # This allows mixing components with different buses (e.g., ble + uart) + # as long as they don't have conflicting configurations for the same bus type + grouped_components = merge_compatible_bus_groups(grouped_components) + # Print detailed grouping plan print("\nGrouping Plan:") print("-" * 80) @@ -560,28 +769,6 @@ def run_grouped_component_tests( # No other groups for this platform - keep no_buses components together grouped_components[(platform, NO_BUSES_SIGNATURE)] = no_buses_comps - # Split groups that exceed platform-specific maximum sizes - # ESP8266 has limited IRAM and can't handle large component groups - split_groups = {} - for (platform, signature), components in list(grouped_components.items()): - max_size = PLATFORM_MAX_GROUP_SIZE.get(platform) - if max_size and len(components) > max_size: - # Split this group into smaller groups - print( - f"\n ℹ️ Splitting {platform} group (signature: {signature}) " - f"from {len(components)} to max {max_size} components per group" - ) - # Remove original group - del grouped_components[(platform, signature)] - # Create split groups - for i in range(0, len(components), max_size): - split_components = components[i : i + max_size] - # Create unique signature for each split group - split_signature = f"{signature}_split{i // max_size + 1}" - split_groups[(platform, split_signature)] = split_components - # Add split groups back - grouped_components.update(split_groups) - groups_to_test = [] individual_tests = set() # Use set to avoid duplicates @@ -672,7 +859,7 @@ def run_grouped_component_tests( continue # Run grouped test - success, cmd_str = run_grouped_test( + test_result = run_grouped_test( components=components_to_group, platform=platform, platform_with_version=platform_with_version, @@ -687,17 +874,10 @@ def run_grouped_component_tests( for comp in components_to_group: tested_components.add((comp, platform_with_version)) - # Record result for each component - show all components in grouped tests - test_id = ( - f"GROUPED[{','.join(components_to_group)}].{platform_with_version}" - ) - if success: - passed_tests.append(test_id) - else: - failed_tests.append(test_id) - failed_commands[test_id] = cmd_str + # Store test result + test_results.append(test_result) - return tested_components, passed_tests, failed_tests, failed_commands + return tested_components, test_results def run_individual_component_test( @@ -710,9 +890,7 @@ def run_individual_component_test( esphome_command: str, continue_on_fail: bool, tested_components: set[tuple[str, str]], - passed_tests: list[str], - failed_tests: list[str], - failed_commands: dict[str, str], + test_results: list[TestResult], ) -> None: """Run an individual component test if not already tested in a group. 
@@ -726,16 +904,13 @@ def run_individual_component_test( esphome_command: ESPHome command continue_on_fail: Whether to continue on failure tested_components: Set of already tested components - passed_tests: List to append passed test IDs - failed_tests: List to append failed test IDs - failed_commands: Dict to store failed test commands + test_results: List to append test results """ # Skip if already tested in a group if (component, platform_with_version) in tested_components: return - test_name = test_file.stem.split(".")[0] - success, cmd_str = run_esphome_test( + test_result = run_esphome_test( component=component, test_file=test_file, platform=platform, @@ -745,12 +920,7 @@ def run_individual_component_test( esphome_command=esphome_command, continue_on_fail=continue_on_fail, ) - test_id = f"{component}.{test_name}.{platform_with_version}" - if success: - passed_tests.append(test_id) - else: - failed_tests.append(test_id) - failed_commands[test_id] = cmd_str + test_results.append(test_result) def test_components( @@ -799,19 +969,12 @@ def test_components( print(f"Found {len(all_tests)} components to test") # Run tests - failed_tests = [] - passed_tests = [] + test_results = [] tested_components = set() # Track which components were tested in groups - failed_commands = {} # Track commands for failed tests # First, run grouped tests if grouping is enabled if enable_grouping: - ( - tested_components, - passed_tests, - failed_tests, - failed_commands, - ) = run_grouped_component_tests( + tested_components, grouped_results = run_grouped_component_tests( all_tests=all_tests, platform_filter=platform_filter, platform_bases=platform_bases, @@ -821,6 +984,7 @@ def test_components( continue_on_fail=continue_on_fail, additional_isolated=isolated_components, ) + test_results.extend(grouped_results) # Then run individual tests for components not in groups for component, test_files in sorted(all_tests.items()): @@ -846,9 +1010,7 @@ def test_components( esphome_command=esphome_command, continue_on_fail=continue_on_fail, tested_components=tested_components, - passed_tests=passed_tests, - failed_tests=failed_tests, - failed_commands=failed_commands, + test_results=test_results, ) else: # Platform-specific test @@ -880,31 +1042,40 @@ def test_components( esphome_command=esphome_command, continue_on_fail=continue_on_fail, tested_components=tested_components, - passed_tests=passed_tests, - failed_tests=failed_tests, - failed_commands=failed_commands, + test_results=test_results, ) + # Separate results into passed and failed + passed_results = [r for r in test_results if r.success] + failed_results = [r for r in test_results if not r.success] + # Print summary print("\n" + "=" * 80) - print(f"Test Summary: {len(passed_tests)} passed, {len(failed_tests)} failed") + print(f"Test Summary: {len(passed_results)} passed, {len(failed_results)} failed") print("=" * 80) - if failed_tests: + if failed_results: print("\nFailed tests:") - for test in failed_tests: - print(f" - {test}") + for result in failed_results: + print(f" - {result.test_id}") - # Print failed commands at the end for easy copy-paste from CI logs + # Print simplified commands grouped by platform and test type for easy copy-paste print("\n" + "=" * 80) - print("Failed test commands (copy-paste to reproduce locally):") + print("Commands to reproduce failures (copy-paste to reproduce locally):") print("=" * 80) - for test in failed_tests: - if test in failed_commands: - print(f"\n# {test}") - print(failed_commands[test]) + platform_components = 
group_components_by_platform(failed_results) + for platform, test_type in sorted(platform_components.keys()): + components_csv = ",".join(platform_components[(platform, test_type)]) + print( + f"script/test_build_components.py -c {components_csv} -t {platform} -e {test_type}" + ) print() + # Write GitHub Actions job summary if in CI + if os.environ.get("GITHUB_STEP_SUMMARY"): + write_github_summary(test_results) + + if failed_results: return 1 return 0 diff --git a/tests/components/ade7880/test.esp8266-ard.yaml b/tests/components/ade7880/test.esp8266-ard.yaml index 81a04d0724..8b5e47f0b5 100644 --- a/tests/components/ade7880/test.esp8266-ard.yaml +++ b/tests/components/ade7880/test.esp8266-ard.yaml @@ -1,5 +1,5 @@ substitutions: - irq0_pin: GPIO13 + irq0_pin: GPIO0 irq1_pin: GPIO15 reset_pin: GPIO16 diff --git a/tests/components/ade7953_i2c/common.yaml b/tests/components/ade7953_i2c/common.yaml index 8b2a9588fe..5253759888 100644 --- a/tests/components/ade7953_i2c/common.yaml +++ b/tests/components/ade7953_i2c/common.yaml @@ -4,10 +4,13 @@ sensor: irq_pin: ${irq_pin} voltage: name: ADE7953 Voltage + id: ade7953_i2c_voltage current_a: name: ADE7953 Current A + id: ade7953_i2c_current_a current_b: name: ADE7953 Current B + id: ade7953_i2c_current_b power_factor_a: name: ADE7953 Power Factor A power_factor_b: diff --git a/tests/components/ade7953_spi/common.yaml b/tests/components/ade7953_spi/common.yaml index 30b5258a2a..f66ab697a1 100644 --- a/tests/components/ade7953_spi/common.yaml +++ b/tests/components/ade7953_spi/common.yaml @@ -4,13 +4,13 @@ sensor: irq_pin: ${irq_pin} voltage: name: ADE7953 Voltage - id: ade7953_voltage + id: ade7953_spi_voltage current_a: name: ADE7953 Current A - id: ade7953_current_a + id: ade7953_spi_current_a current_b: name: ADE7953 Current B - id: ade7953_current_b + id: ade7953_spi_current_b power_factor_a: name: ADE7953 Power Factor A power_factor_b: diff --git a/tests/components/as3935_i2c/common.yaml b/tests/components/as3935_i2c/common.yaml index a758bb7f56..d659486e83 100644 --- a/tests/components/as3935_i2c/common.yaml +++ b/tests/components/as3935_i2c/common.yaml @@ -1,13 +1,16 @@ as3935_i2c: + id: as3935_i2c_id i2c_id: i2c_bus irq_pin: ${irq_pin} binary_sensor: - platform: as3935 + as3935_id: as3935_i2c_id name: Storm Alert sensor: - platform: as3935 + as3935_id: as3935_i2c_id lightning_energy: name: Lightning Energy distance: diff --git a/tests/components/as3935_spi/common.yaml b/tests/components/as3935_spi/common.yaml index 5898d5d365..d2942dc01d 100644 --- a/tests/components/as3935_spi/common.yaml +++ b/tests/components/as3935_spi/common.yaml @@ -1,13 +1,16 @@ as3935_spi: + id: as3935_spi_id cs_pin: ${cs_pin} irq_pin: ${irq_pin} binary_sensor: - platform: as3935 + as3935_id: as3935_spi_id name: Storm Alert sensor: - platform: as3935 + as3935_id: as3935_spi_id lightning_energy: name: Lightning Energy distance: diff --git a/tests/components/axs15231/common.yaml b/tests/components/axs15231/common.yaml index 3f07af80ea..d4fd3becbb 100644 --- a/tests/components/axs15231/common.yaml +++ b/tests/components/axs15231/common.yaml @@ -1,7 +1,7 @@ display: - platform: ssd1306_i2c i2c_id: i2c_bus - id: ssd1306_display + id: ssd1306_i2c_display model: SSD1306_128X64 reset_pin: 19 pages: @@ -13,6 +13,6 @@ touchscreen: - platform: axs15231 i2c_id: i2c_bus id: axs15231_touchscreen - display: ssd1306_display + display: ssd1306_i2c_display interrupt_pin: 20 reset_pin: 18 diff --git a/tests/components/bme280_i2c/common.yaml b/tests/components/bme280_i2c/common.yaml 
index e6d41d209c..a31a6f9a6c 100644 --- a/tests/components/bme280_i2c/common.yaml +++ b/tests/components/bme280_i2c/common.yaml @@ -3,12 +3,12 @@ sensor: i2c_id: i2c_bus address: 0x76 temperature: - id: bme280_temperature + id: bme280_i2c_temperature name: BME280 Temperature humidity: - id: bme280_humidity + id: bme280_i2c_humidity name: BME280 Humidity pressure: - id: bme280_pressure + id: bme280_i2c_pressure name: BME280 Pressure update_interval: 15s diff --git a/tests/components/bme280_spi/common.yaml b/tests/components/bme280_spi/common.yaml index 9a50b410fb..d97b475f0e 100644 --- a/tests/components/bme280_spi/common.yaml +++ b/tests/components/bme280_spi/common.yaml @@ -2,12 +2,12 @@ sensor: - platform: bme280_spi cs_pin: ${cs_pin} temperature: - id: bme280_temperature + id: bme280_spi_temperature name: BME280 Temperature humidity: - id: bme280_humidity + id: bme280_spi_humidity name: BME280 Humidity pressure: - id: bme280_pressure + id: bme280_spi_pressure name: BME280 Pressure update_interval: 15s diff --git a/tests/components/bmp280_i2c/common.yaml b/tests/components/bmp280_i2c/common.yaml index 785343de7d..77a9db7fc5 100644 --- a/tests/components/bmp280_i2c/common.yaml +++ b/tests/components/bmp280_i2c/common.yaml @@ -3,10 +3,10 @@ sensor: i2c_id: i2c_bus address: 0x77 temperature: - id: bmp280_temperature + id: bmp280_i2c_temperature name: Outside Temperature pressure: name: Outside Pressure - id: bmp280_pressure + id: bmp280_i2c_pressure iir_filter: 16x update_interval: 15s diff --git a/tests/components/bmp280_spi/common.yaml b/tests/components/bmp280_spi/common.yaml index fa88967ca4..1be54f6b74 100644 --- a/tests/components/bmp280_spi/common.yaml +++ b/tests/components/bmp280_spi/common.yaml @@ -2,10 +2,10 @@ sensor: - platform: bmp280_spi cs_pin: ${cs_pin} temperature: - id: bmp280_temperature + id: bmp280_spi_temperature name: Outside Temperature pressure: name: Outside Pressure - id: bmp280_pressure + id: bmp280_spi_pressure iir_filter: 16x update_interval: 15s diff --git a/tests/components/bmp3xx_i2c/common.yaml b/tests/components/bmp3xx_i2c/common.yaml index ebc4921b84..e651072f25 100644 --- a/tests/components/bmp3xx_i2c/common.yaml +++ b/tests/components/bmp3xx_i2c/common.yaml @@ -3,8 +3,10 @@ sensor: i2c_id: i2c_bus address: 0x77 temperature: + id: bmp3xx_i2c_temperature name: BMP Temperature oversampling: 16x pressure: + id: bmp3xx_i2c_pressure name: BMP Pressure iir_filter: 2X diff --git a/tests/components/bmp3xx_spi/common.yaml b/tests/components/bmp3xx_spi/common.yaml index d6acef1833..b59d46c967 100644 --- a/tests/components/bmp3xx_spi/common.yaml +++ b/tests/components/bmp3xx_spi/common.yaml @@ -2,8 +2,10 @@ sensor: - platform: bmp3xx_spi cs_pin: ${cs_pin} temperature: + id: bmp3xx_spi_temperature name: BMP Temperature oversampling: 16x pressure: + id: bmp3xx_spi_pressure name: BMP Pressure iir_filter: 2X diff --git a/tests/components/camera/test.esp32-idf.yaml b/tests/components/camera/test.esp32-idf.yaml index 3d93dd6418..ef8f74d4eb 100644 --- a/tests/components/camera/test.esp32-idf.yaml +++ b/tests/components/camera/test.esp32-idf.yaml @@ -1,4 +1,4 @@ packages: - camera: !include ../../test_build_components/common/camera/esp32-idf.yaml + i2c_camera: !include ../../test_build_components/common/i2c_camera/esp32-idf.yaml <<: !include common.yaml diff --git a/tests/components/camera_encoder/test.esp32-idf.yaml b/tests/components/camera_encoder/test.esp32-idf.yaml index 3d93dd6418..ef8f74d4eb 100644 --- a/tests/components/camera_encoder/test.esp32-idf.yaml +++ 
b/tests/components/camera_encoder/test.esp32-idf.yaml @@ -1,4 +1,4 @@ packages: - camera: !include ../../test_build_components/common/camera/esp32-idf.yaml + i2c_camera: !include ../../test_build_components/common/i2c_camera/esp32-idf.yaml <<: !include common.yaml diff --git a/tests/components/chsc6x/test.esp32-idf.yaml b/tests/components/chsc6x/test.esp32-idf.yaml index ea3686d8bd..fa7c72150e 100644 --- a/tests/components/chsc6x/test.esp32-idf.yaml +++ b/tests/components/chsc6x/test.esp32-idf.yaml @@ -4,6 +4,7 @@ packages: display: - platform: ili9xxx + spi_id: spi_bus id: ili9xxx_display model: GC9A01A invert_colors: True @@ -16,5 +17,6 @@ display: touchscreen: - platform: chsc6x + i2c_id: i2c_bus display: ili9xxx_display interrupt_pin: 20 diff --git a/tests/components/ektf2232/common.yaml b/tests/components/ektf2232/common.yaml index 8ab57be46f..1c4d768b08 100644 --- a/tests/components/ektf2232/common.yaml +++ b/tests/components/ektf2232/common.yaml @@ -1,7 +1,7 @@ display: - platform: ssd1306_i2c i2c_id: i2c_bus - id: ssd1306_display + id: ssd1306_i2c_display model: SSD1306_128X64 reset_pin: ${display_reset_pin} pages: @@ -15,7 +15,7 @@ touchscreen: id: ektf2232_touchscreen interrupt_pin: ${interrupt_pin} reset_pin: ${touch_reset_pin} - display: ssd1306_display + display: ssd1306_i2c_display on_touch: - logger.log: format: Touch at (%d, %d) diff --git a/tests/components/ens160_i2c/common.yaml b/tests/components/ens160_i2c/common.yaml index 685c8d3fee..1da2bacec2 100644 --- a/tests/components/ens160_i2c/common.yaml +++ b/tests/components/ens160_i2c/common.yaml @@ -3,8 +3,11 @@ sensor: i2c_id: i2c_bus address: 0x53 eco2: + id: ens160_i2c_eco2 name: "ENS160 eCO2" tvoc: + id: ens160_i2c_tvoc name: "ENS160 Total Volatile Organic Compounds" aqi: + id: ens160_i2c_aqi name: "ENS160 Air Quality Index" diff --git a/tests/components/ens160_spi/common.yaml b/tests/components/ens160_spi/common.yaml index 53000a5e96..46a3f40b40 100644 --- a/tests/components/ens160_spi/common.yaml +++ b/tests/components/ens160_spi/common.yaml @@ -2,8 +2,11 @@ sensor: - platform: ens160_spi cs_pin: ${cs_pin} eco2: + id: ens160_spi_eco2 name: "ENS160 eCO2" tvoc: + id: ens160_spi_tvoc name: "ENS160 Total Volatile Organic Compounds" aqi: + id: ens160_spi_aqi name: "ENS160 Air Quality Index" diff --git a/tests/components/esp32_camera/test.esp32-idf.yaml b/tests/components/esp32_camera/test.esp32-idf.yaml index 3d93dd6418..ef8f74d4eb 100644 --- a/tests/components/esp32_camera/test.esp32-idf.yaml +++ b/tests/components/esp32_camera/test.esp32-idf.yaml @@ -1,4 +1,4 @@ packages: - camera: !include ../../test_build_components/common/camera/esp32-idf.yaml + i2c_camera: !include ../../test_build_components/common/i2c_camera/esp32-idf.yaml <<: !include common.yaml diff --git a/tests/components/esp32_camera_web_server/test.esp32-idf.yaml b/tests/components/esp32_camera_web_server/test.esp32-idf.yaml index 3d93dd6418..ef8f74d4eb 100644 --- a/tests/components/esp32_camera_web_server/test.esp32-idf.yaml +++ b/tests/components/esp32_camera_web_server/test.esp32-idf.yaml @@ -1,4 +1,4 @@ packages: - camera: !include ../../test_build_components/common/camera/esp32-idf.yaml + i2c_camera: !include ../../test_build_components/common/i2c_camera/esp32-idf.yaml <<: !include common.yaml diff --git a/tests/components/font/common.yaml b/tests/components/font/common.yaml index 2d2e970536..6ba52e3d97 100644 --- a/tests/components/font/common.yaml +++ b/tests/components/font/common.yaml @@ -49,6 +49,7 @@ font: display: - platform: ssd1306_i2c + 
i2c_id: i2c_bus id: ssd1306_display model: SSD1306_128X64 reset_pin: ${display_reset_pin} diff --git a/tests/components/ft63x6/test.esp8266-ard.yaml b/tests/components/ft63x6/test.esp8266-ard.yaml index d6b6903029..3ac5c645e3 100644 --- a/tests/components/ft63x6/test.esp8266-ard.yaml +++ b/tests/components/ft63x6/test.esp8266-ard.yaml @@ -1,5 +1,5 @@ substitutions: - interrupt_pin: GPIO12 + interrupt_pin: GPIO0 reset_pin: GPIO16 packages: diff --git a/tests/components/graph/common.yaml b/tests/components/graph/common.yaml index 578de5a60c..11e2a16ca1 100644 --- a/tests/components/graph/common.yaml +++ b/tests/components/graph/common.yaml @@ -11,6 +11,7 @@ graph: display: - platform: ssd1306_i2c + i2c_id: i2c_bus id: ssd1306_display model: SSD1306_128X64 reset_pin: ${reset_pin} diff --git a/tests/components/graphical_display_menu/common.yaml b/tests/components/graphical_display_menu/common.yaml index 0b8f20d64b..6cee2af232 100644 --- a/tests/components/graphical_display_menu/common.yaml +++ b/tests/components/graphical_display_menu/common.yaml @@ -1,6 +1,6 @@ display: - platform: ssd1306_i2c - id: ssd1306_display + id: ssd1306_i2c_display model: SSD1306_128X64 reset_pin: ${reset_pin} pages: @@ -36,7 +36,7 @@ switch: graphical_display_menu: id: test_graphical_display_menu - display: ssd1306_display + display: ssd1306_i2c_display font: roboto active: false mode: rotary diff --git a/tests/components/gt911/common.yaml b/tests/components/gt911/common.yaml index 5f9748afb6..ff464cda24 100644 --- a/tests/components/gt911/common.yaml +++ b/tests/components/gt911/common.yaml @@ -1,7 +1,7 @@ display: - platform: ssd1306_i2c i2c_id: i2c_bus - id: ssd1306_display + id: ssd1306_i2c_display model: SSD1306_128X64 reset_pin: ${display_reset_pin} pages: @@ -13,7 +13,7 @@ touchscreen: - platform: gt911 i2c_id: i2c_bus id: gt911_touchscreen - display: ssd1306_display + display: ssd1306_i2c_display interrupt_pin: ${interrupt_pin} reset_pin: ${reset_pin} diff --git a/tests/components/hx711/test.esp8266-ard.yaml b/tests/components/hx711/test.esp8266-ard.yaml index defef165e3..e7c017ed99 100644 --- a/tests/components/hx711/test.esp8266-ard.yaml +++ b/tests/components/hx711/test.esp8266-ard.yaml @@ -1,5 +1,5 @@ substitutions: - clk_pin: GPIO4 - dout_pin: GPIO5 + clk_pin: GPIO0 + dout_pin: GPIO2 <<: !include common.yaml diff --git a/tests/components/ina2xx_i2c/common.yaml b/tests/components/ina2xx_i2c/common.yaml index 7d586f136e..2416ad6daf 100644 --- a/tests/components/ina2xx_i2c/common.yaml +++ b/tests/components/ina2xx_i2c/common.yaml @@ -7,9 +7,21 @@ sensor: max_current: 40 A adc_range: 1 temperature_coefficient: 50 - shunt_voltage: "INA2xx Shunt Voltage" - bus_voltage: "INA2xx Bus Voltage" - current: "INA2xx Current" - power: "INA2xx Power" - energy: "INA2xx Energy" - charge: "INA2xx Charge" + shunt_voltage: + id: ina2xx_i2c_shunt_voltage + name: "INA2xx Shunt Voltage" + bus_voltage: + id: ina2xx_i2c_bus_voltage + name: "INA2xx Bus Voltage" + current: + id: ina2xx_i2c_current + name: "INA2xx Current" + power: + id: ina2xx_i2c_power + name: "INA2xx Power" + energy: + id: ina2xx_i2c_energy + name: "INA2xx Energy" + charge: + id: ina2xx_i2c_charge + name: "INA2xx Charge" diff --git a/tests/components/ina2xx_spi/common.yaml b/tests/components/ina2xx_spi/common.yaml index d9b2300e26..8de77eba26 100644 --- a/tests/components/ina2xx_spi/common.yaml +++ b/tests/components/ina2xx_spi/common.yaml @@ -6,9 +6,21 @@ sensor: max_current: 40 A adc_range: 1 temperature_coefficient: 50 - shunt_voltage: "INA2xx Shunt Voltage" 
- bus_voltage: "INA2xx Bus Voltage" - current: "INA2xx Current" - power: "INA2xx Power" - energy: "INA2xx Energy" - charge: "INA2xx Charge" + shunt_voltage: + id: ina2xx_spi_shunt_voltage + name: "INA2xx Shunt Voltage" + bus_voltage: + id: ina2xx_spi_bus_voltage + name: "INA2xx Bus Voltage" + current: + id: ina2xx_spi_current + name: "INA2xx Current" + power: + id: ina2xx_spi_power + name: "INA2xx Power" + energy: + id: ina2xx_spi_energy + name: "INA2xx Energy" + charge: + id: ina2xx_spi_charge + name: "INA2xx Charge" diff --git a/tests/components/lilygo_t5_47/common.yaml b/tests/components/lilygo_t5_47/common.yaml index 7079139ec7..18f1ba10ae 100644 --- a/tests/components/lilygo_t5_47/common.yaml +++ b/tests/components/lilygo_t5_47/common.yaml @@ -1,7 +1,7 @@ display: - platform: ssd1306_i2c i2c_id: i2c_bus - id: ssd1306_display + id: ssd1306_i2c_display model: SSD1306_128X64 reset_pin: ${reset_pin} pages: @@ -14,7 +14,7 @@ touchscreen: i2c_id: i2c_bus id: lilygo_touchscreen interrupt_pin: ${interrupt_pin} - display: ssd1306_display + display: ssd1306_i2c_display on_touch: - logger.log: format: Touch at (%d, %d) diff --git a/tests/components/pn532_i2c/common.yaml b/tests/components/pn532_i2c/common.yaml index f328cd40ee..947e1151aa 100644 --- a/tests/components/pn532_i2c/common.yaml +++ b/tests/components/pn532_i2c/common.yaml @@ -1,9 +1,9 @@ pn532_i2c: i2c_id: i2c_bus - id: pn532_nfcc + id: pn532_nfcc_i2c binary_sensor: - platform: pn532 - pn532_id: pn532_nfcc + pn532_id: pn532_nfcc_i2c name: PN532 NFC Tag uid: 74-10-37-94 diff --git a/tests/components/pn532_spi/common.yaml b/tests/components/pn532_spi/common.yaml index e749a9896a..f9149af35f 100644 --- a/tests/components/pn532_spi/common.yaml +++ b/tests/components/pn532_spi/common.yaml @@ -1,9 +1,9 @@ pn532_spi: - id: pn532_nfcc + id: pn532_nfcc_spi cs_pin: ${cs_pin} binary_sensor: - platform: pn532 - pn532_id: pn532_nfcc + pn532_id: pn532_nfcc_spi name: PN532 NFC Tag uid: 74-10-37-94 diff --git a/tests/components/pn7160_i2c/common.yaml b/tests/components/pn7160_i2c/common.yaml index 9807bff0f0..fa9a876d1c 100644 --- a/tests/components/pn7160_i2c/common.yaml +++ b/tests/components/pn7160_i2c/common.yaml @@ -1,23 +1,23 @@ esphome: on_boot: then: - - tag.set_clean_mode: nfcc_pn7160 - - tag.set_format_mode: nfcc_pn7160 - - tag.set_read_mode: nfcc_pn7160 + - tag.set_clean_mode: nfcc_pn7160_i2c + - tag.set_format_mode: nfcc_pn7160_i2c + - tag.set_read_mode: nfcc_pn7160_i2c - tag.set_write_message: message: https://www.home-assistant.io/tag/pulse include_android_app_record: false - - tag.set_write_mode: nfcc_pn7160 + - tag.set_write_mode: nfcc_pn7160_i2c - tag.set_emulation_message: message: https://www.home-assistant.io/tag/pulse include_android_app_record: false - - tag.emulation_off: nfcc_pn7160 - - tag.emulation_on: nfcc_pn7160 - - tag.polling_off: nfcc_pn7160 - - tag.polling_on: nfcc_pn7160 + - tag.emulation_off: nfcc_pn7160_i2c + - tag.emulation_on: nfcc_pn7160_i2c + - tag.polling_off: nfcc_pn7160_i2c + - tag.polling_on: nfcc_pn7160_i2c pn7150_i2c: - id: nfcc_pn7160 + id: nfcc_pn7160_i2c i2c_id: i2c_bus irq_pin: ${irq_pin} ven_pin: ${ven_pin} diff --git a/tests/components/pn7160_spi/common.yaml b/tests/components/pn7160_spi/common.yaml index d467eb093f..53b37b38f4 100644 --- a/tests/components/pn7160_spi/common.yaml +++ b/tests/components/pn7160_spi/common.yaml @@ -1,23 +1,23 @@ esphome: on_boot: then: - - tag.set_clean_mode: nfcc_pn7160 - - tag.set_format_mode: nfcc_pn7160 - - tag.set_read_mode: nfcc_pn7160 + - tag.set_clean_mode: 
nfcc_pn7160_spi + - tag.set_format_mode: nfcc_pn7160_spi + - tag.set_read_mode: nfcc_pn7160_spi - tag.set_write_message: message: https://www.home-assistant.io/tag/pulse include_android_app_record: false - - tag.set_write_mode: nfcc_pn7160 + - tag.set_write_mode: nfcc_pn7160_spi - tag.set_emulation_message: message: https://www.home-assistant.io/tag/pulse include_android_app_record: false - - tag.emulation_off: nfcc_pn7160 - - tag.emulation_on: nfcc_pn7160 - - tag.polling_off: nfcc_pn7160 - - tag.polling_on: nfcc_pn7160 + - tag.emulation_off: nfcc_pn7160_spi + - tag.emulation_on: nfcc_pn7160_spi + - tag.polling_off: nfcc_pn7160_spi + - tag.polling_on: nfcc_pn7160_spi pn7160_spi: - id: nfcc_pn7160 + id: nfcc_pn7160_spi cs_pin: ${cs_pin} irq_pin: ${irq_pin} ven_pin: ${ven_pin} diff --git a/tests/components/rc522_i2c/common.yaml b/tests/components/rc522_i2c/common.yaml index 65b92a3e78..0e624351d4 100644 --- a/tests/components/rc522_i2c/common.yaml +++ b/tests/components/rc522_i2c/common.yaml @@ -1,5 +1,5 @@ rc522_i2c: - - id: rc522_nfcc + - id: rc522_nfcc_i2c i2c_id: i2c_bus update_interval: 1s on_tag: @@ -8,6 +8,6 @@ rc522_i2c: binary_sensor: - platform: rc522 - rc522_id: rc522_nfcc + rc522_id: rc522_nfcc_i2c name: RC522 NFC Tag uid: 74-10-37-94 diff --git a/tests/components/rc522_spi/common.yaml b/tests/components/rc522_spi/common.yaml index 4ce1d6584b..c4830850de 100644 --- a/tests/components/rc522_spi/common.yaml +++ b/tests/components/rc522_spi/common.yaml @@ -1,9 +1,9 @@ rc522_spi: - id: rc522_nfcc + id: rc522_nfcc_spi cs_pin: ${cs_pin} binary_sensor: - platform: rc522 - rc522_id: rc522_nfcc - name: PN532 NFC Tag + rc522_id: rc522_nfcc_spi + name: RC522 NFC Tag uid: 74-10-37-94 diff --git a/tests/components/selec_meter/test.esp8266-ard.yaml b/tests/components/selec_meter/test.esp8266-ard.yaml index 48a7307795..6daa08c22b 100644 --- a/tests/components/selec_meter/test.esp8266-ard.yaml +++ b/tests/components/selec_meter/test.esp8266-ard.yaml @@ -1,7 +1,7 @@ substitutions: tx_pin: GPIO0 rx_pin: GPIO2 - flow_control_pin: GPIO4 + flow_control_pin: GPIO15 packages: modbus: !include ../../test_build_components/common/modbus/esp8266-ard.yaml diff --git a/tests/components/sn74hc595/test.esp8266-ard.yaml b/tests/components/sn74hc595/test.esp8266-ard.yaml index cc011e01d4..e0de8bb0a3 100644 --- a/tests/components/sn74hc595/test.esp8266-ard.yaml +++ b/tests/components/sn74hc595/test.esp8266-ard.yaml @@ -2,8 +2,8 @@ packages: spi: !include ../../test_build_components/common/spi/esp8266-ard.yaml substitutions: - clock_pin: GPIO5 - data_pin: GPIO4 + clock_pin: GPIO15 + data_pin: GPIO16 latch_pin1: GPIO2 oe_pin1: GPIO0 latch_pin2: GPIO3 diff --git a/tests/components/ssd1306_i2c/common.yaml b/tests/components/ssd1306_i2c/common.yaml index e876fcb36a..09eb569a8e 100644 --- a/tests/components/ssd1306_i2c/common.yaml +++ b/tests/components/ssd1306_i2c/common.yaml @@ -4,7 +4,7 @@ display: model: SSD1306_128X64 reset_pin: ${reset_pin} address: 0x3C - id: display1 + id: ssd1306_i2c_display contrast: 60% pages: - id: ssd1306_i2c_page1 diff --git a/tests/components/ssd1306_spi/common.yaml b/tests/components/ssd1306_spi/common.yaml index 2a2adb4146..0297abc192 100644 --- a/tests/components/ssd1306_spi/common.yaml +++ b/tests/components/ssd1306_spi/common.yaml @@ -1,5 +1,6 @@ display: - platform: ssd1306_spi + id: ssd1306_spi_display model: SSD1306 128x64 cs_pin: ${cs_pin} dc_pin: ${dc_pin} diff --git a/tests/components/ssd1327_i2c/common.yaml b/tests/components/ssd1327_i2c/common.yaml index 
c90e9678dd..c5f2123d9a 100644 --- a/tests/components/ssd1327_i2c/common.yaml +++ b/tests/components/ssd1327_i2c/common.yaml @@ -4,7 +4,7 @@ display: model: SSD1327_128x128 reset_pin: ${reset_pin} address: 0x3C - id: display1 + id: ssd1327_i2c_display pages: - id: ssd1327_i2c_page1 lambda: |- diff --git a/tests/components/ssd1327_spi/common.yaml b/tests/components/ssd1327_spi/common.yaml index 1aa4fb5a1c..b46e61e080 100644 --- a/tests/components/ssd1327_spi/common.yaml +++ b/tests/components/ssd1327_spi/common.yaml @@ -1,5 +1,6 @@ display: - platform: ssd1327_spi + id: ssd1327_spi_display model: SSD1327 128x128 cs_pin: ${cs_pin} dc_pin: ${dc_pin} diff --git a/tests/components/st7567_i2c/common.yaml b/tests/components/st7567_i2c/common.yaml index 9a4cd79faa..c81d6825e3 100644 --- a/tests/components/st7567_i2c/common.yaml +++ b/tests/components/st7567_i2c/common.yaml @@ -3,7 +3,7 @@ display: i2c_id: i2c_bus reset_pin: ${reset_pin} address: 0x3C - id: display1 + id: st7567_i2c_display pages: - id: st7567_i2c_page1 lambda: |- diff --git a/tests/components/st7567_spi/common.yaml b/tests/components/st7567_spi/common.yaml index b5a4074e13..25a8932ee1 100644 --- a/tests/components/st7567_spi/common.yaml +++ b/tests/components/st7567_spi/common.yaml @@ -1,5 +1,6 @@ display: - platform: st7567_spi + id: st7567_spi_display cs_pin: ${cs_pin} dc_pin: ${dc_pin} reset_pin: ${reset_pin} diff --git a/tests/components/syslog/common.yaml b/tests/components/syslog/common.yaml index cd6e63c9ec..daa913f009 100644 --- a/tests/components/syslog/common.yaml +++ b/tests/components/syslog/common.yaml @@ -6,7 +6,8 @@ udp: addresses: ["239.0.60.53"] time: - platform: host + - platform: host + id: host_time syslog: port: 514 diff --git a/tests/components/tt21100/common.yaml b/tests/components/tt21100/common.yaml index bd1830ea8b..56089aed1e 100644 --- a/tests/components/tt21100/common.yaml +++ b/tests/components/tt21100/common.yaml @@ -1,7 +1,7 @@ display: - platform: ssd1306_i2c i2c_id: i2c_bus - id: ssd1306_display + id: ssd1306_i2c_display model: SSD1306_128X64 reset_pin: ${disp_reset_pin} pages: @@ -13,7 +13,7 @@ touchscreen: - platform: tt21100 i2c_id: i2c_bus id: tt21100_touchscreen - display: ssd1306_display + display: ssd1306_i2c_display interrupt_pin: ${interrupt_pin} reset_pin: ${reset_pin} diff --git a/tests/components/wk2132_i2c/test.esp32-idf.yaml b/tests/components/wk2132_i2c/test.esp32-idf.yaml index b47e39c389..6b748a8f20 100644 --- a/tests/components/wk2132_i2c/test.esp32-idf.yaml +++ b/tests/components/wk2132_i2c/test.esp32-idf.yaml @@ -1,4 +1,5 @@ packages: i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml + uart_bridge_2: !include ../../test_build_components/common/uart_bridge_2/esp32-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2132_i2c/test.esp32-s3-idf.yaml b/tests/components/wk2132_i2c/test.esp32-s3-idf.yaml index e9d826aa7c..d7b149a6fd 100644 --- a/tests/components/wk2132_i2c/test.esp32-s3-idf.yaml +++ b/tests/components/wk2132_i2c/test.esp32-s3-idf.yaml @@ -4,5 +4,6 @@ substitutions: packages: i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml + uart_bridge_2: !include ../../test_build_components/common/uart_bridge_2/esp32-s3-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2132_spi/common.yaml b/tests/components/wk2132_spi/common.yaml index a762c10c92..18294974b9 100644 --- a/tests/components/wk2132_spi/common.yaml +++ b/tests/components/wk2132_spi/common.yaml @@ -1,20 +1,20 @@ wk2132_spi: - - id: wk2132_spi_id + - id: 
wk2132_spi_bridge cs_pin: ${cs_pin} crystal: 11059200 data_rate: 1MHz uart: - - id: wk2132_spi_id0 + - id: wk2132_spi_uart0 channel: 0 baud_rate: 115200 stop_bits: 1 parity: none - - id: wk2132_spi_id1 + - id: wk2132_spi_uart1 channel: 1 baud_rate: 9600 # Ensures a sensor doesn't break validation sensor: - platform: a02yyuw - uart_id: wk2132_spi_id1 + uart_id: wk2132_spi_uart1 id: distance_sensor diff --git a/tests/components/wk2132_spi/test.esp32-idf.yaml b/tests/components/wk2132_spi/test.esp32-idf.yaml index a3352cf880..9202a691ba 100644 --- a/tests/components/wk2132_spi/test.esp32-idf.yaml +++ b/tests/components/wk2132_spi/test.esp32-idf.yaml @@ -3,5 +3,6 @@ substitutions: packages: spi: !include ../../test_build_components/common/spi/esp32-idf.yaml + uart_bridge_2: !include ../../test_build_components/common/uart_bridge_2/esp32-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2132_spi/test.esp32-s3-idf.yaml b/tests/components/wk2132_spi/test.esp32-s3-idf.yaml index 6a60c90fb2..9c7d36996e 100644 --- a/tests/components/wk2132_spi/test.esp32-s3-idf.yaml +++ b/tests/components/wk2132_spi/test.esp32-s3-idf.yaml @@ -6,5 +6,6 @@ substitutions: packages: spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml + uart_bridge_2: !include ../../test_build_components/common/uart_bridge_2/esp32-s3-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2168_i2c/test.esp32-idf.yaml b/tests/components/wk2168_i2c/test.esp32-idf.yaml index b47e39c389..9d9f0d4931 100644 --- a/tests/components/wk2168_i2c/test.esp32-idf.yaml +++ b/tests/components/wk2168_i2c/test.esp32-idf.yaml @@ -1,4 +1,5 @@ packages: i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml + uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2168_i2c/test.esp32-s3-idf.yaml b/tests/components/wk2168_i2c/test.esp32-s3-idf.yaml index e9d826aa7c..115812be97 100644 --- a/tests/components/wk2168_i2c/test.esp32-s3-idf.yaml +++ b/tests/components/wk2168_i2c/test.esp32-s3-idf.yaml @@ -4,5 +4,6 @@ substitutions: packages: i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml + uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2168_spi/test.esp32-idf.yaml b/tests/components/wk2168_spi/test.esp32-idf.yaml index a3352cf880..2b56a46b70 100644 --- a/tests/components/wk2168_spi/test.esp32-idf.yaml +++ b/tests/components/wk2168_spi/test.esp32-idf.yaml @@ -3,5 +3,6 @@ substitutions: packages: spi: !include ../../test_build_components/common/spi/esp32-idf.yaml + uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2168_spi/test.esp32-s3-idf.yaml b/tests/components/wk2168_spi/test.esp32-s3-idf.yaml index 6a60c90fb2..374fe64d16 100644 --- a/tests/components/wk2168_spi/test.esp32-s3-idf.yaml +++ b/tests/components/wk2168_spi/test.esp32-s3-idf.yaml @@ -6,5 +6,6 @@ substitutions: packages: spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml + uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2204_i2c/test.esp32-idf.yaml b/tests/components/wk2204_i2c/test.esp32-idf.yaml index b47e39c389..9d9f0d4931 100644 --- a/tests/components/wk2204_i2c/test.esp32-idf.yaml +++ 
b/tests/components/wk2204_i2c/test.esp32-idf.yaml @@ -1,4 +1,5 @@ packages: i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml + uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2204_i2c/test.esp32-s3-idf.yaml b/tests/components/wk2204_i2c/test.esp32-s3-idf.yaml index e9d826aa7c..115812be97 100644 --- a/tests/components/wk2204_i2c/test.esp32-s3-idf.yaml +++ b/tests/components/wk2204_i2c/test.esp32-s3-idf.yaml @@ -4,5 +4,6 @@ substitutions: packages: i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml + uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2204_spi/common.yaml b/tests/components/wk2204_spi/common.yaml index 939c54cc40..0b62a7a009 100644 --- a/tests/components/wk2204_spi/common.yaml +++ b/tests/components/wk2204_spi/common.yaml @@ -1,28 +1,28 @@ wk2204_spi: - - id: wk2204_spi_id + - id: wk2204_spi_bridge cs_pin: ${cs_pin} crystal: 11059200 data_rate: 1MHz uart: - - id: wk2204_spi_id0 + - id: wk2204_spi_uart0 channel: 0 baud_rate: 115200 stop_bits: 1 parity: none - - id: wk2204_spi_id1 + - id: wk2204_spi_uart1 channel: 1 baud_rate: 921600 - - id: wk2204_spi_id2 + - id: wk2204_spi_uart2 channel: 2 baud_rate: 115200 stop_bits: 1 parity: none - - id: wk2204_spi_id3 + - id: wk2204_spi_uart3 channel: 3 baud_rate: 9600 # Ensures a sensor doesn't break validation sensor: - platform: a02yyuw - uart_id: wk2204_spi_id3 + uart_id: wk2204_spi_uart3 id: distance_sensor diff --git a/tests/components/wk2204_spi/test.esp32-idf.yaml b/tests/components/wk2204_spi/test.esp32-idf.yaml index a3352cf880..2b56a46b70 100644 --- a/tests/components/wk2204_spi/test.esp32-idf.yaml +++ b/tests/components/wk2204_spi/test.esp32-idf.yaml @@ -3,5 +3,6 @@ substitutions: packages: spi: !include ../../test_build_components/common/spi/esp32-idf.yaml + uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2204_spi/test.esp32-s3-idf.yaml b/tests/components/wk2204_spi/test.esp32-s3-idf.yaml index 6a60c90fb2..374fe64d16 100644 --- a/tests/components/wk2204_spi/test.esp32-s3-idf.yaml +++ b/tests/components/wk2204_spi/test.esp32-s3-idf.yaml @@ -6,5 +6,6 @@ substitutions: packages: spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml + uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2212_i2c/test.esp32-idf.yaml b/tests/components/wk2212_i2c/test.esp32-idf.yaml index b47e39c389..9d9f0d4931 100644 --- a/tests/components/wk2212_i2c/test.esp32-idf.yaml +++ b/tests/components/wk2212_i2c/test.esp32-idf.yaml @@ -1,4 +1,5 @@ packages: i2c: !include ../../test_build_components/common/i2c/esp32-idf.yaml + uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2212_i2c/test.esp32-s3-idf.yaml b/tests/components/wk2212_i2c/test.esp32-s3-idf.yaml index e9d826aa7c..115812be97 100644 --- a/tests/components/wk2212_i2c/test.esp32-s3-idf.yaml +++ b/tests/components/wk2212_i2c/test.esp32-s3-idf.yaml @@ -4,5 +4,6 @@ substitutions: packages: i2c: !include ../../test_build_components/common/i2c/esp32-s3-idf.yaml + uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml <<: !include 
common.yaml diff --git a/tests/components/wk2212_spi/test.esp32-idf.yaml b/tests/components/wk2212_spi/test.esp32-idf.yaml index a3352cf880..2b56a46b70 100644 --- a/tests/components/wk2212_spi/test.esp32-idf.yaml +++ b/tests/components/wk2212_spi/test.esp32-idf.yaml @@ -3,5 +3,6 @@ substitutions: packages: spi: !include ../../test_build_components/common/spi/esp32-idf.yaml + uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-idf.yaml <<: !include common.yaml diff --git a/tests/components/wk2212_spi/test.esp32-s3-idf.yaml b/tests/components/wk2212_spi/test.esp32-s3-idf.yaml index 6a60c90fb2..374fe64d16 100644 --- a/tests/components/wk2212_spi/test.esp32-s3-idf.yaml +++ b/tests/components/wk2212_spi/test.esp32-s3-idf.yaml @@ -6,5 +6,6 @@ substitutions: packages: spi: !include ../../test_build_components/common/spi/esp32-s3-idf.yaml + uart_bridge_4: !include ../../test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml <<: !include common.yaml diff --git a/tests/test_build_components/build_components_base.esp32-idf.yaml b/tests/test_build_components/build_components_base.esp32-idf.yaml index a62a995e68..dcb951c1ed 100644 --- a/tests/test_build_components/build_components_base.esp32-idf.yaml +++ b/tests/test_build_components/build_components_base.esp32-idf.yaml @@ -3,9 +3,13 @@ esphome: friendly_name: $component_name esp32: - board: nodemcu-32s + # Use board with 8MB flash for testing large component groups + board: esp32-pico-devkitm-2 framework: type: esp-idf + # Use custom partition table with larger app partitions (3MB each) + # Default IDF partitions only allow 1.75MB which is too small for grouped tests + partitions: ../partitions_testing.csv logger: level: VERY_VERBOSE diff --git a/tests/test_build_components/common/camera/esp32-idf.yaml b/tests/test_build_components/common/i2c_camera/esp32-idf.yaml similarity index 83% rename from tests/test_build_components/common/camera/esp32-idf.yaml rename to tests/test_build_components/common/i2c_camera/esp32-idf.yaml index 64f75c699a..a6e7c264cb 100644 --- a/tests/test_build_components/common/camera/esp32-idf.yaml +++ b/tests/test_build_components/common/i2c_camera/esp32-idf.yaml @@ -1,3 +1,10 @@ +# I2C bus for camera sensor +i2c: + - id: i2c_camera_bus + sda: 25 + scl: 23 + frequency: 400kHz + esp32_camera: name: ESP32 Camera data_pins: @@ -15,9 +22,7 @@ esp32_camera: external_clock: pin: 27 frequency: 20MHz - i2c_pins: - sda: 25 - scl: 23 + i2c_id: i2c_camera_bus reset_pin: 15 power_down_pin: 1 resolution: 640x480 diff --git a/tests/test_build_components/common/uart_bridge_2/esp32-idf.yaml b/tests/test_build_components/common/uart_bridge_2/esp32-idf.yaml new file mode 100644 index 0000000000..ff8a2f8d13 --- /dev/null +++ b/tests/test_build_components/common/uart_bridge_2/esp32-idf.yaml @@ -0,0 +1,11 @@ +# Common configuration for 2-channel UART bridge/expander chips +# Used by components like wk2132 that create 2 UART channels +# Defines standardized UART IDs: uart_id_0, uart_id_1 + +substitutions: + # These will be overridden by component-specific values + uart_bridge_address: "0x70" + +# Note: The actual UART instances are created by the bridge component +# This package just ensures all bridge components use the same ID naming convention +# so they can be grouped together without conflicts diff --git a/tests/test_build_components/common/uart_bridge_2/esp32-s3-idf.yaml b/tests/test_build_components/common/uart_bridge_2/esp32-s3-idf.yaml new file mode 100644 index 0000000000..ff8a2f8d13 --- /dev/null +++ 
b/tests/test_build_components/common/uart_bridge_2/esp32-s3-idf.yaml @@ -0,0 +1,11 @@ +# Common configuration for 2-channel UART bridge/expander chips +# Used by components like wk2132 that create 2 UART channels +# Defines standardized UART IDs: uart_id_0, uart_id_1 + +substitutions: + # These will be overridden by component-specific values + uart_bridge_address: "0x70" + +# Note: The actual UART instances are created by the bridge component +# This package just ensures all bridge components use the same ID naming convention +# so they can be grouped together without conflicts diff --git a/tests/test_build_components/common/uart_bridge_4/esp32-idf.yaml b/tests/test_build_components/common/uart_bridge_4/esp32-idf.yaml new file mode 100644 index 0000000000..bb88037947 --- /dev/null +++ b/tests/test_build_components/common/uart_bridge_4/esp32-idf.yaml @@ -0,0 +1,11 @@ +# Common configuration for 4-channel UART bridge/expander chips +# Used by components like wk2168, wk2204, wk2212 that create 4 UART channels +# Defines standardized UART IDs: uart_id_0, uart_id_1, uart_id_2, uart_id_3 + +substitutions: + # These will be overridden by component-specific values + uart_bridge_address: "0x70" + +# Note: The actual UART instances are created by the bridge component +# This package just ensures all bridge components use the same ID naming convention +# so they can be grouped together without conflicts diff --git a/tests/test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml b/tests/test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml new file mode 100644 index 0000000000..bb88037947 --- /dev/null +++ b/tests/test_build_components/common/uart_bridge_4/esp32-s3-idf.yaml @@ -0,0 +1,11 @@ +# Common configuration for 4-channel UART bridge/expander chips +# Used by components like wk2168, wk2204, wk2212 that create 4 UART channels +# Defines standardized UART IDs: uart_id_0, uart_id_1, uart_id_2, uart_id_3 + +substitutions: + # These will be overridden by component-specific values + uart_bridge_address: "0x70" + +# Note: The actual UART instances are created by the bridge component +# This package just ensures all bridge components use the same ID naming convention +# so they can be grouped together without conflicts diff --git a/tests/test_build_components/partitions_testing.csv b/tests/test_build_components/partitions_testing.csv new file mode 100644 index 0000000000..0ca8c24e05 --- /dev/null +++ b/tests/test_build_components/partitions_testing.csv @@ -0,0 +1,10 @@ +# ESP-IDF Partition Table for ESPHome Component Testing +# Single app partition to maximize space for large component group testing +# Fits in 4MB flash +# Name, Type, SubType, Offset, Size, Flags +nvs, data, nvs, 0x9000, 0x4000, +otadata, data, ota, , 0x2000, +phy_init, data, phy, , 0x1000, +factory, app, factory, 0x10000, 0x300000, +nvs_key, data, nvs_keys,, 0x1000, +coredump, data, coredump,, 0xEB000, From d7832c44bc114461778e1d8aa8ee827e78e71b38 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 15 Oct 2025 17:45:37 -1000 Subject: [PATCH 11/31] [sensor] Fix sliding window filter memory fragmentation with FixedVector ring buffer --- esphome/components/sensor/filter.cpp | 235 +++++++++------------------ esphome/components/sensor/filter.h | 144 +++++++++------- 2 files changed, 165 insertions(+), 214 deletions(-) diff --git a/esphome/components/sensor/filter.cpp b/esphome/components/sensor/filter.cpp index 3241ae28af..900acd281a 100644 --- a/esphome/components/sensor/filter.cpp +++ b/esphome/components/sensor/filter.cpp @@ -32,50 +32,73 @@ void Filter::initialize(Sensor *parent, Filter *next) { this->next_ = next; } -// MedianFilter -MedianFilter::MedianFilter(size_t window_size, size_t send_every, size_t send_first_at) - : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size) {} -void MedianFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; } -void MedianFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; } -optional MedianFilter::new_value(float value) { - while (this->queue_.size() >= this->window_size_) { - this->queue_.pop_front(); - } - this->queue_.push_back(value); - ESP_LOGVV(TAG, "MedianFilter(%p)::new_value(%f)", this, value); +// SlidingWindowFilter +SlidingWindowFilter::SlidingWindowFilter(size_t window_size, size_t send_every, size_t send_first_at) + : window_size_(window_size), send_every_(send_every), send_at_(send_every - send_first_at) { + // Allocate ring buffer once at initialization + this->window_.init(window_size); +} +void SlidingWindowFilter::set_window_size(size_t window_size) { + this->window_size_ = window_size; + // Reallocate buffer with new size + this->window_.init(window_size); + this->window_head_ = 0; + this->window_count_ = 0; +} + +optional SlidingWindowFilter::new_value(float value) { + // Add value to ring buffer + if (this->window_count_ < this->window_size_) { + // Buffer not yet full - just append + this->window_.push_back(value); + this->window_count_++; + this->window_head_ = this->window_count_; + } else { + // Buffer full - overwrite oldest value (ring buffer) + this->window_[this->window_head_] = value; + this->window_head_ = (this->window_head_ + 1) % this->window_size_; + } + + // Check if we should send a result if (++this->send_at_ >= this->send_every_) { this->send_at_ = 0; - - float median = NAN; - if (!this->queue_.empty()) { - // Copy queue without NaN values - std::vector median_queue; - median_queue.reserve(this->queue_.size()); - for (auto v : this->queue_) { - if (!std::isnan(v)) { - median_queue.push_back(v); - } - } - - sort(median_queue.begin(), median_queue.end()); - - size_t queue_size = median_queue.size(); - if (queue_size) { - if (queue_size % 2) { - median = median_queue[queue_size / 2]; - } else { - median = (median_queue[queue_size / 2] + median_queue[(queue_size / 2) - 1]) / 2.0f; - } - } - } - - ESP_LOGVV(TAG, "MedianFilter(%p)::new_value(%f) SENDING %f", this, value, median); - return median; + float result = this->compute_result_(); + ESP_LOGVV(TAG, "SlidingWindowFilter(%p)::new_value(%f) SENDING %f", this, value, result); + return result; } return {}; } +// SortedWindowFilter +FixedVector SortedWindowFilter::get_sorted_values_() { + // Copy window without NaN values using FixedVector (no heap allocation) + FixedVector sorted_values; + sorted_values.init(this->window_count_); + for (size_t i = 0; i < this->window_count_; i++) { + float v = this->window_[i]; + if (!std::isnan(v)) { + 
sorted_values.push_back(v); + } + } + sort(sorted_values.begin(), sorted_values.end()); + return sorted_values; +} + +// MedianFilter +float MedianFilter::compute_result_() { + FixedVector<float> sorted_values = this->get_sorted_values_(); + if (sorted_values.empty()) + return NAN; + + size_t size = sorted_values.size(); + if (size % 2) { + return sorted_values[size / 2]; + } else { + return (sorted_values[size / 2] + sorted_values[(size / 2) - 1]) / 2.0f; + } +} + // SkipInitialFilter SkipInitialFilter::SkipInitialFilter(size_t num_to_ignore) : num_to_ignore_(num_to_ignore) {} optional<float> SkipInitialFilter::new_value(float value) {
@@ -91,136 +114,36 @@ optional<float> SkipInitialFilter::new_value(float value) { // QuantileFilter QuantileFilter::QuantileFilter(size_t window_size, size_t send_every, size_t send_first_at, float quantile) - : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size), quantile_(quantile) {} -void QuantileFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; } -void QuantileFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; } -void QuantileFilter::set_quantile(float quantile) { this->quantile_ = quantile; } -optional<float> QuantileFilter::new_value(float value) { - while (this->queue_.size() >= this->window_size_) { - this->queue_.pop_front(); - } - this->queue_.push_back(value); - ESP_LOGVV(TAG, "QuantileFilter(%p)::new_value(%f), quantile:%f", this, value, this->quantile_); + : SortedWindowFilter(window_size, send_every, send_first_at), quantile_(quantile) {} - if (++this->send_at_ >= this->send_every_) { - this->send_at_ = 0; +float QuantileFilter::compute_result_() { + FixedVector<float> sorted_values = this->get_sorted_values_(); + if (sorted_values.empty()) + return NAN; - float result = NAN; - if (!this->queue_.empty()) { - // Copy queue without NaN values - std::vector<float> quantile_queue; - for (auto v : this->queue_) { - if (!std::isnan(v)) { - quantile_queue.push_back(v); - } - } - - sort(quantile_queue.begin(), quantile_queue.end()); - - size_t queue_size = quantile_queue.size(); - if (queue_size) { - size_t position = ceilf(queue_size * this->quantile_) - 1; - ESP_LOGVV(TAG, "QuantileFilter(%p)::position: %zu/%zu", this, position + 1, queue_size); - result = quantile_queue[position]; - } - } - - ESP_LOGVV(TAG, "QuantileFilter(%p)::new_value(%f) SENDING %f", this, value, result); - return result; - } - return {}; + size_t position = ceilf(sorted_values.size() * this->quantile_) - 1; + ESP_LOGVV(TAG, "QuantileFilter(%p)::position: %zu/%zu", this, position + 1, sorted_values.size()); + return sorted_values[position]; } // MinFilter -MinFilter::MinFilter(size_t window_size, size_t send_every, size_t send_first_at) - : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size) {} -void MinFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; } -void MinFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; } -optional<float> MinFilter::new_value(float value) { - while (this->queue_.size() >= this->window_size_) { - this->queue_.pop_front(); - } - this->queue_.push_back(value); - ESP_LOGVV(TAG, "MinFilter(%p)::new_value(%f)", this, value); - - if (++this->send_at_ >= this->send_every_) { - this->send_at_ = 0; - - float min = NAN; - for (auto v : this->queue_) { - if (!std::isnan(v)) { - min = std::isnan(min) ?
v : std::min(min, v); - } - } - - ESP_LOGVV(TAG, "MinFilter(%p)::new_value(%f) SENDING %f", this, value, min); - return min; - } - return {}; -} +float MinFilter::compute_result_() { return this->find_extremum_<std::less<float>>(); } // MaxFilter -MaxFilter::MaxFilter(size_t window_size, size_t send_every, size_t send_first_at) - : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size) {} -void MaxFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; } -void MaxFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; } -optional<float> MaxFilter::new_value(float value) { - while (this->queue_.size() >= this->window_size_) { - this->queue_.pop_front(); - } - this->queue_.push_back(value); - ESP_LOGVV(TAG, "MaxFilter(%p)::new_value(%f)", this, value); - - if (++this->send_at_ >= this->send_every_) { - this->send_at_ = 0; - - float max = NAN; - for (auto v : this->queue_) { - if (!std::isnan(v)) { - max = std::isnan(max) ? v : std::max(max, v); - } - } - - ESP_LOGVV(TAG, "MaxFilter(%p)::new_value(%f) SENDING %f", this, value, max); - return max; - } - return {}; -} +float MaxFilter::compute_result_() { return this->find_extremum_<std::greater<float>>(); } // SlidingWindowMovingAverageFilter -SlidingWindowMovingAverageFilter::SlidingWindowMovingAverageFilter(size_t window_size, size_t send_every, - size_t send_first_at) - : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size) {} -void SlidingWindowMovingAverageFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; } -void SlidingWindowMovingAverageFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; } -optional<float> SlidingWindowMovingAverageFilter::new_value(float value) { - while (this->queue_.size() >= this->window_size_) { - this->queue_.pop_front(); - } - this->queue_.push_back(value); - ESP_LOGVV(TAG, "SlidingWindowMovingAverageFilter(%p)::new_value(%f)", this, value); - - if (++this->send_at_ >= this->send_every_) { - this->send_at_ = 0; - - float sum = 0; - size_t valid_count = 0; - for (auto v : this->queue_) { - if (!std::isnan(v)) { - sum += v; - valid_count++; - } +float SlidingWindowMovingAverageFilter::compute_result_() { + float sum = 0; + size_t valid_count = 0; + for (size_t i = 0; i < this->window_count_; i++) { + float v = this->window_[i]; + if (!std::isnan(v)) { + sum += v; + valid_count++; } - - float average = NAN; - if (valid_count) { - average = sum / valid_count; - } - - ESP_LOGVV(TAG, "SlidingWindowMovingAverageFilter(%p)::new_value(%f) SENDING %f", this, value, average); - return average; } - return {}; + return valid_count ? sum / valid_count : NAN; } // ExponentialMovingAverageFilter
diff --git a/esphome/components/sensor/filter.h b/esphome/components/sensor/filter.h index 49d83e5b4b..0154cb8321 100644 --- a/esphome/components/sensor/filter.h +++ b/esphome/components/sensor/filter.h
@@ -44,11 +44,78 @@ class Filter { Sensor *parent_{nullptr}; }; +/** Base class for filters that use a sliding window of values. + * + * Uses a ring buffer to efficiently maintain a fixed-size sliding window without + * reallocations or pop_front() overhead. Eliminates deque fragmentation issues.
+ */ +class SlidingWindowFilter : public Filter { + public: + SlidingWindowFilter(size_t window_size, size_t send_every, size_t send_first_at); + + void set_send_every(size_t send_every) { this->send_every_ = send_every; } + void set_window_size(size_t window_size); + + optional<float> new_value(float value) final; + + protected: + /// Called by new_value() to compute the filtered result from the current window + virtual float compute_result_() = 0; + + /// Access the sliding window values (ring buffer implementation) + /// Use: for (size_t i = 0; i < window_count_; i++) { float val = window_[i]; } + FixedVector<float> window_; + size_t window_head_{0}; ///< Index where next value will be written + size_t window_count_{0}; ///< Number of valid values in window (0 to window_size_) + size_t window_size_; ///< Maximum window size + size_t send_every_; ///< Send result every N values + size_t send_at_; ///< Counter for send_every +}; + +/** Base class for Min/Max filters. + * + * Provides a templated helper to find extremum values efficiently. + */ +class MinMaxFilter : public SlidingWindowFilter { + public: + using SlidingWindowFilter::SlidingWindowFilter; + + protected: + /// Helper to find min or max value in window, skipping NaN values + /// Usage: find_extremum_<std::less<float>>() for min, find_extremum_<std::greater<float>>() for max + template<typename Compare> float find_extremum_() { + float result = NAN; + Compare comp; + for (size_t i = 0; i < this->window_count_; i++) { + float v = this->window_[i]; + if (!std::isnan(v)) { + result = std::isnan(result) ? v : (comp(v, result) ? v : result); + } + } + return result; + } +}; + +/** Base class for filters that need a sorted window (Median, Quantile). + * + * Extends SlidingWindowFilter to provide a helper that creates a sorted copy + * of non-NaN values from the window. + */ +class SortedWindowFilter : public SlidingWindowFilter { + public: + using SlidingWindowFilter::SlidingWindowFilter; + + protected: + /// Helper to get sorted non-NaN values from the window + /// Returns empty FixedVector if all values are NaN + FixedVector<float> get_sorted_values_(); +}; + /** Simple quantile filter. * - * Takes the quantile of the last <send_every> values and pushes it out every <send_every>. + * Takes the quantile of the last <window_size> values and pushes it out every <send_every>. */ -class QuantileFilter : public Filter { +class QuantileFilter : public SortedWindowFilter { public: /** Construct a QuantileFilter. * @@ -61,25 +128,18 @@ class QuantileFilter : public Filter { */ explicit QuantileFilter(size_t window_size, size_t send_every, size_t send_first_at, float quantile); - optional<float> new_value(float value) override; - - void set_send_every(size_t send_every); - void set_window_size(size_t window_size); - void set_quantile(float quantile); + void set_quantile(float quantile) { this->quantile_ = quantile; } protected: - std::deque<float> queue_; - size_t send_every_; - size_t send_at_; - size_t window_size_; + float compute_result_() override; float quantile_; }; /** Simple median filter. * - * Takes the median of the last <send_every> values and pushes it out every <send_every>. + * Takes the median of the last <window_size> values and pushes it out every <send_every>. */ -class MedianFilter : public Filter { +class MedianFilter : public SortedWindowFilter { public: /** Construct a MedianFilter. * @@ -89,18 +149,10 @@ class MedianFilter : public Filter { * on startup being published on the first *raw* value, so with no filter applied. Must be less than or equal to * send_every.
*/ - explicit MedianFilter(size_t window_size, size_t send_every, size_t send_first_at); - - optional<float> new_value(float value) override; - - void set_send_every(size_t send_every); - void set_window_size(size_t window_size); + using SortedWindowFilter::SortedWindowFilter; protected: - std::deque<float> queue_; - size_t send_every_; - size_t send_at_; - size_t window_size_; + float compute_result_() override; }; /** Simple skip filter.
@@ -123,9 +175,9 @@ class SkipInitialFilter : public Filter { /** Simple min filter. * - * Takes the min of the last <send_every> values and pushes it out every <send_every>. + * Takes the min of the last <window_size> values and pushes it out every <send_every>. */ -class MinFilter : public Filter { +class MinFilter : public MinMaxFilter { public: /** Construct a MinFilter. *
@@ -135,25 +187,17 @@ class MinFilter : public Filter { * on startup being published on the first *raw* value, so with no filter applied. Must be less than or equal to * send_every. */ - explicit MinFilter(size_t window_size, size_t send_every, size_t send_first_at); - - optional<float> new_value(float value) override; - - void set_send_every(size_t send_every); - void set_window_size(size_t window_size); + using MinMaxFilter::MinMaxFilter; protected: - std::deque<float> queue_; - size_t send_every_; - size_t send_at_; - size_t window_size_; + float compute_result_() override; }; /** Simple max filter. * - * Takes the max of the last <send_every> values and pushes it out every <send_every>. + * Takes the max of the last <window_size> values and pushes it out every <send_every>. */ -class MaxFilter : public Filter { +class MaxFilter : public MinMaxFilter { public: /** Construct a MaxFilter. *
@@ -163,18 +207,10 @@ class MaxFilter : public Filter { * on startup being published on the first *raw* value, so with no filter applied. Must be less than or equal to * send_every. */ - explicit MaxFilter(size_t window_size, size_t send_every, size_t send_first_at); - - optional<float> new_value(float value) override; - - void set_send_every(size_t send_every); - void set_window_size(size_t window_size); + using MinMaxFilter::MinMaxFilter; protected: - std::deque<float> queue_; - size_t send_every_; - size_t send_at_; - size_t window_size_; + float compute_result_() override; }; /** Simple sliding window moving average filter.
@@ -182,7 +218,7 @@ class MaxFilter : public Filter { * Essentially just takes takes the average of the last window_size values and pushes them out * every send_every. */ -class SlidingWindowMovingAverageFilter : public Filter { +class SlidingWindowMovingAverageFilter : public SlidingWindowFilter { public: /** Construct a SlidingWindowMovingAverageFilter. *
@@ -192,18 +228,10 @@ class SlidingWindowMovingAverageFilter : public SlidingWindowFilter { * on startup being published on the first *raw* value, so with no filter applied. Must be less than or equal to * send_every. */ - explicit SlidingWindowMovingAverageFilter(size_t window_size, size_t send_every, size_t send_first_at); - - optional<float> new_value(float value) override; - - void set_send_every(size_t send_every); - void set_window_size(size_t window_size); + using SlidingWindowFilter::SlidingWindowFilter; protected: - std::deque<float> queue_; - size_t send_every_; - size_t send_at_; - size_t window_size_; + float compute_result_() override; }; /** Simple exponential moving average filter.
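For readers tracking the refactor above: the entire ring-buffer-plus-virtual-hook pattern reduces to a few lines. The sketch below is an illustration, not code from the patch; it stands a once-reserved std::vector in for ESPHome's internal FixedVector, hard-codes a median as the compute step, omits the NaN filtering the real filters perform, and uses a hypothetical name (WindowDemo). Note the fill-phase head handling, which a later patch in this series tightens up.

    // Minimal standalone model of the SlidingWindowFilter pattern (assumptions noted above).
    #include <algorithm>
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    class WindowDemo {
     public:
      WindowDemo(size_t window_size, size_t send_every, size_t send_first_at)
          : window_size_(window_size), send_every_(send_every), send_at_(send_every - send_first_at) {
        window_.reserve(window_size);  // one allocation up front; the buffer never grows past this
      }

      // Returns true and writes *out whenever a result is due (every send_every samples).
      bool new_value(float value, float *out) {
        if (window_.size() < window_size_) {
          window_.push_back(value);  // fill phase: append until the window is full
        } else {
          window_[head_] = value;              // steady state: overwrite the oldest slot
          head_ = (head_ + 1) % window_size_;  // and advance the ring head
        }
        if (++send_at_ < send_every_)
          return false;
        send_at_ = 0;
        *out = median_();  // stand-in for the virtual compute_result_() hook
        return true;
      }

     private:
      float median_() {
        std::vector<float> sorted(window_.begin(), window_.end());  // scratch copy, as in get_sorted_values_()
        std::sort(sorted.begin(), sorted.end());
        size_t n = sorted.size();
        return n % 2 ? sorted[n / 2] : (sorted[n / 2] + sorted[n / 2 - 1]) / 2.0f;
      }

      size_t window_size_;
      size_t send_every_;
      size_t send_at_;
      size_t head_{0};
      std::vector<float> window_;
    };

    int main() {
      WindowDemo demo(5, 5, 1);  // same window_size/send_every the integration tests below use
      float out;
      for (int i = 1; i <= 10; i++) {
        if (demo.new_value(static_cast<float>(i), &out))
          std::printf("sample %d -> median %.1f\n", i, out);  // fires at samples 1 and 6
      }
      return 0;
    }

With window_size=5, send_every=5, send_first_at=1 this emits after samples 1 and 6; at sample 6 the window holds {2, 3, 4, 5, 6} and the median is 4.0, which is exactly what the integration tests added later in this series assert.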
Nick Koston" Date: Wed, 15 Oct 2025 17:50:27 -1000 Subject: [PATCH 12/31] fix --- esphome/components/sensor/filter.h | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/esphome/components/sensor/filter.h b/esphome/components/sensor/filter.h index 0154cb8321..9c2710bc93 100644 --- a/esphome/components/sensor/filter.h +++ b/esphome/components/sensor/filter.h @@ -60,7 +60,7 @@ class SlidingWindowFilter : public Filter { protected: /// Called by new_value() to compute the filtered result from the current window - virtual float compute_result_() = 0; + virtual float compute_result() = 0; /// Access the sliding window values (ring buffer implementation) /// Use: for (size_t i = 0; i < window_count_; i++) { float val = window_[i]; } @@ -131,7 +131,7 @@ class QuantileFilter : public SortedWindowFilter { void set_quantile(float quantile) { this->quantile_ = quantile; } protected: - float compute_result_() override; + float compute_result() override; float quantile_; }; @@ -152,7 +152,7 @@ class MedianFilter : public SortedWindowFilter { using SortedWindowFilter::SortedWindowFilter; protected: - float compute_result_() override; + float compute_result() override; }; /** Simple skip filter. @@ -190,7 +190,7 @@ class MinFilter : public MinMaxFilter { using MinMaxFilter::MinMaxFilter; protected: - float compute_result_() override; + float compute_result() override; }; /** Simple max filter. @@ -210,7 +210,7 @@ class MaxFilter : public MinMaxFilter { using MinMaxFilter::MinMaxFilter; protected: - float compute_result_() override; + float compute_result() override; }; /** Simple sliding window moving average filter. From 36f851130950d071ba914efaba95849e836d5bad Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 17:50:32 -1000 Subject: [PATCH 13/31] fix --- esphome/components/sensor/filter.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/sensor/filter.h b/esphome/components/sensor/filter.h index 9c2710bc93..b391048521 100644 --- a/esphome/components/sensor/filter.h +++ b/esphome/components/sensor/filter.h @@ -231,7 +231,7 @@ class SlidingWindowMovingAverageFilter : public SlidingWindowFilter { using SlidingWindowFilter::SlidingWindowFilter; protected: - float compute_result_() override; + float compute_result() override; }; /** Simple exponential moving average filter. From cd252a33f9c0a16c67a8edc14f0311182eaf4c75 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 15 Oct 2025 17:51:03 -1000 Subject: [PATCH 14/31] fix --- esphome/components/sensor/filter.cpp | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/esphome/components/sensor/filter.cpp b/esphome/components/sensor/filter.cpp index 900acd281a..6406d670c1 100644 --- a/esphome/components/sensor/filter.cpp +++ b/esphome/components/sensor/filter.cpp @@ -63,7 +63,7 @@ optional SlidingWindowFilter::new_value(float value) { // Check if we should send a result if (++this->send_at_ >= this->send_every_) { this->send_at_ = 0; - float result = this->compute_result_(); + float result = this->compute_result(); ESP_LOGVV(TAG, "SlidingWindowFilter(%p)::new_value(%f) SENDING %f", this, value, result); return result; } @@ -86,7 +86,7 @@ FixedVector SortedWindowFilter::get_sorted_values_() { } // MedianFilter -float MedianFilter::compute_result_() { +float MedianFilter::compute_result() { FixedVector sorted_values = this->get_sorted_values_(); if (sorted_values.empty()) return NAN; @@ -116,7 +116,7 @@ optional SkipInitialFilter::new_value(float value) { QuantileFilter::QuantileFilter(size_t window_size, size_t send_every, size_t send_first_at, float quantile) : SortedWindowFilter(window_size, send_every, send_first_at), quantile_(quantile) {} -float QuantileFilter::compute_result_() { +float QuantileFilter::compute_result() { FixedVector sorted_values = this->get_sorted_values_(); if (sorted_values.empty()) return NAN; @@ -127,10 +127,10 @@ float QuantileFilter::compute_result_() { } // MinFilter -float MinFilter::compute_result_() { return this->find_extremum_>(); } +float MinFilter::compute_result() { return this->find_extremum_>(); } // MaxFilter -float MaxFilter::compute_result_() { return this->find_extremum_>(); } +float MaxFilter::compute_result() { return this->find_extremum_>(); } // SlidingWindowMovingAverageFilter float SlidingWindowMovingAverageFilter::compute_result_() { From 4c24545b826215fedf2e4c4aaa0cd15d01e2b25d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 17:51:08 -1000 Subject: [PATCH 15/31] fix --- esphome/components/sensor/filter.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/sensor/filter.cpp b/esphome/components/sensor/filter.cpp index 6406d670c1..a6819dd73c 100644 --- a/esphome/components/sensor/filter.cpp +++ b/esphome/components/sensor/filter.cpp @@ -133,7 +133,7 @@ float MinFilter::compute_result() { return this->find_extremum_ float MaxFilter::compute_result() { return this->find_extremum_>(); } // SlidingWindowMovingAverageFilter -float SlidingWindowMovingAverageFilter::compute_result_() { +float SlidingWindowMovingAverageFilter::compute_result() { float sum = 0; size_t valid_count = 0; for (size_t i = 0; i < this->window_count_; i++) { From b074ca8a1ed6c7658049bff8bc4e47e862d50b09 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 15 Oct 2025 18:00:33 -1000 Subject: [PATCH 16/31] fix --- esphome/components/sensor/filter.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/esphome/components/sensor/filter.cpp b/esphome/components/sensor/filter.cpp index a6819dd73c..0e52f9d94f 100644 --- a/esphome/components/sensor/filter.cpp +++ b/esphome/components/sensor/filter.cpp @@ -53,7 +53,7 @@ optional SlidingWindowFilter::new_value(float value) { // Buffer not yet full - just append this->window_.push_back(value); this->window_count_++; - this->window_head_ = this->window_count_; + this->window_head_ = this->window_count_ % this->window_size_; } else { // Buffer full - overwrite oldest value (ring buffer) this->window_[this->window_head_] = value; @@ -81,7 +81,7 @@ FixedVector SortedWindowFilter::get_sorted_values_() { sorted_values.push_back(v); } } - sort(sorted_values.begin(), sorted_values.end()); + std::sort(sorted_values.begin(), sorted_values.end()); return sorted_values; } From 9b6707c1c0b1a6e38bd1ff30d9a0621dd6aff7a9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 18:25:42 -1000 Subject: [PATCH 17/31] tests --- .../fixtures/sensor_filters_nan_handling.yaml | 50 +++ ...sensor_filters_ring_buffer_wraparound.yaml | 36 ++ .../sensor_filters_sliding_window.yaml | 78 ++++ .../test_sensor_filters_sliding_window.py | 385 ++++++++++++++++++ 4 files changed, 549 insertions(+) create mode 100644 tests/integration/fixtures/sensor_filters_nan_handling.yaml create mode 100644 tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml create mode 100644 tests/integration/fixtures/sensor_filters_sliding_window.yaml create mode 100644 tests/integration/test_sensor_filters_sliding_window.py diff --git a/tests/integration/fixtures/sensor_filters_nan_handling.yaml b/tests/integration/fixtures/sensor_filters_nan_handling.yaml new file mode 100644 index 0000000000..20fae64e8b --- /dev/null +++ b/tests/integration/fixtures/sensor_filters_nan_handling.yaml @@ -0,0 +1,50 @@ +esphome: + name: test-nan-handling + +host: +api: + batch_delay: 0ms # Disable batching to receive all state updates +logger: + level: DEBUG + +sensor: + - platform: template + name: "Source NaN Sensor" + id: source_nan_sensor + accuracy_decimals: 2 + + - platform: copy + source_id: source_nan_sensor + name: "Min NaN Sensor" + id: min_nan_sensor + filters: + - min: + window_size: 5 + send_every: 5 + + - platform: copy + source_id: source_nan_sensor + name: "Max NaN Sensor" + id: max_nan_sensor + filters: + - max: + window_size: 5 + send_every: 5 + +button: + - platform: template + name: "Publish NaN Values Button" + id: publish_nan_button + on_press: + - lambda: |- + // Publish 10 values with NaN mixed in: 10, NaN, 5, NaN, 15, 8, NaN, 12, 3, NaN + id(source_nan_sensor).publish_state(10.0); + id(source_nan_sensor).publish_state(NAN); + id(source_nan_sensor).publish_state(5.0); + id(source_nan_sensor).publish_state(NAN); + id(source_nan_sensor).publish_state(15.0); + id(source_nan_sensor).publish_state(8.0); + id(source_nan_sensor).publish_state(NAN); + id(source_nan_sensor).publish_state(12.0); + id(source_nan_sensor).publish_state(3.0); + id(source_nan_sensor).publish_state(NAN); diff --git a/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml b/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml new file mode 100644 index 0000000000..1ff9ec542a --- /dev/null +++ b/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml @@ -0,0 +1,36 @@ 
+esphome: + name: test-ring-buffer-wraparound + +host: +api: + batch_delay: 0ms # Disable batching to receive all state updates +logger: + level: DEBUG + +sensor: + - platform: template + name: "Source Wraparound Sensor" + id: source_wraparound + accuracy_decimals: 2 + + - platform: copy + source_id: source_wraparound + name: "Wraparound Min Sensor" + id: wraparound_min_sensor + filters: + - min: + window_size: 3 + send_every: 3 + +button: + - platform: template + name: "Publish Wraparound Button" + id: publish_wraparound_button + on_press: + - lambda: |- + // Publish 9 values to test ring buffer wraparound + // Values: 10, 20, 30, 5, 25, 15, 40, 35, 20 + float values[] = {10.0, 20.0, 30.0, 5.0, 25.0, 15.0, 40.0, 35.0, 20.0}; + for (int i = 0; i < 9; i++) { + id(source_wraparound).publish_state(values[i]); + } diff --git a/tests/integration/fixtures/sensor_filters_sliding_window.yaml b/tests/integration/fixtures/sensor_filters_sliding_window.yaml new file mode 100644 index 0000000000..a2ae3182b8 --- /dev/null +++ b/tests/integration/fixtures/sensor_filters_sliding_window.yaml @@ -0,0 +1,78 @@ +esphome: + name: test-sliding-window-filters + +host: +api: + batch_delay: 0ms # Disable batching to receive all state updates +logger: + level: DEBUG + +# Template sensor that we'll use to publish values +sensor: + - platform: template + name: "Source Sensor" + id: source_sensor + accuracy_decimals: 2 + + # Min filter sensor + - platform: copy + source_id: source_sensor + name: "Min Sensor" + id: min_sensor + filters: + - min: + window_size: 5 + send_every: 5 + + # Max filter sensor + - platform: copy + source_id: source_sensor + name: "Max Sensor" + id: max_sensor + filters: + - max: + window_size: 5 + send_every: 5 + + # Median filter sensor + - platform: copy + source_id: source_sensor + name: "Median Sensor" + id: median_sensor + filters: + - median: + window_size: 5 + send_every: 5 + + # Quantile filter sensor (90th percentile) + - platform: copy + source_id: source_sensor + name: "Quantile Sensor" + id: quantile_sensor + filters: + - quantile: + window_size: 5 + send_every: 5 + quantile: 0.9 + + # Moving average filter sensor + - platform: copy + source_id: source_sensor + name: "Moving Avg Sensor" + id: moving_avg_sensor + filters: + - sliding_window_moving_average: + window_size: 5 + send_every: 5 + +# Button to trigger publishing test values +button: + - platform: template + name: "Publish Values Button" + id: publish_button + on_press: + - lambda: |- + // Publish 10 values: 1.0, 2.0, ..., 10.0 + for (int i = 1; i <= 10; i++) { + id(source_sensor).publish_state(float(i)); + } diff --git a/tests/integration/test_sensor_filters_sliding_window.py b/tests/integration/test_sensor_filters_sliding_window.py new file mode 100644 index 0000000000..943502c38f --- /dev/null +++ b/tests/integration/test_sensor_filters_sliding_window.py @@ -0,0 +1,385 @@ +"""Test sensor sliding window filter functionality.""" + +from __future__ import annotations + +import asyncio + +from aioesphomeapi import EntityInfo, EntityState, SensorState +import pytest + +from .types import APIClientConnectedFactory, RunCompiledFunction + + +def build_key_to_sensor_mapping( + entities: list[EntityInfo], sensor_names: list[str] +) -> dict[int, str]: + """Build a mapping from entity keys to sensor names. 
+ + Args: + entities: List of entity info objects from the API + sensor_names: List of sensor names to search for in object_ids + + Returns: + Dictionary mapping entity keys to sensor names + """ + key_to_sensor: dict[int, str] = {} + for entity in entities: + obj_id = entity.object_id.lower() + for sensor_name in sensor_names: + if sensor_name in obj_id: + key_to_sensor[entity.key] = sensor_name + break + return key_to_sensor + + +@pytest.mark.asyncio +async def test_sensor_filters_sliding_window( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test that sliding window filters (min, max, median, quantile, moving_average) work correctly.""" + loop = asyncio.get_running_loop() + + # Track state changes for each sensor + sensor_states: dict[str, list[float]] = { + "min_sensor": [], + "max_sensor": [], + "median_sensor": [], + "quantile_sensor": [], + "moving_avg_sensor": [], + } + + # Futures to track when we receive expected values + min_received = loop.create_future() + max_received = loop.create_future() + median_received = loop.create_future() + quantile_received = loop.create_future() + moving_avg_received = loop.create_future() + + def on_state(state: EntityState) -> None: + """Track sensor state updates.""" + if not isinstance(state, SensorState): + return + + # Skip NaN values (initial states) + if state.missing_state: + return + + # Get the sensor name from the key mapping + sensor_name = key_to_sensor.get(state.key) + if sensor_name and sensor_name in sensor_states: + sensor_states[sensor_name].append(state.state) + + # Check if we received the expected final value + # After publishing 10 values [1.0, 2.0, ..., 10.0], the window has the last 5: [2, 3, 4, 5, 6] + # Filters send at position 1 and position 6 (send_every=5 means every 5th value after first) + if ( + sensor_name == "min_sensor" + and abs(state.state - 2.0) < 0.01 + and not min_received.done() + ): + min_received.set_result(True) + elif ( + sensor_name == "max_sensor" + and abs(state.state - 6.0) < 0.01 + and not max_received.done() + ): + max_received.set_result(True) + elif ( + sensor_name == "median_sensor" + and abs(state.state - 4.0) < 0.01 + and not median_received.done() + ): + # Median of [2, 3, 4, 5, 6] = 4 + median_received.set_result(True) + elif ( + sensor_name == "quantile_sensor" + and abs(state.state - 6.0) < 0.01 + and not quantile_received.done() + ): + # 90th percentile of [2, 3, 4, 5, 6] = 6 + quantile_received.set_result(True) + elif ( + sensor_name == "moving_avg_sensor" + and abs(state.state - 4.0) < 0.01 + and not moving_avg_received.done() + ): + # Average of [2, 3, 4, 5, 6] = 4 + moving_avg_received.set_result(True) + + async with ( + run_compiled(yaml_config), + api_client_connected() as client, + ): + # Get entities first to build key mapping + entities, services = await client.list_entities_services() + + # Build key-to-sensor mapping + key_to_sensor = build_key_to_sensor_mapping( + entities, + [ + "min_sensor", + "max_sensor", + "median_sensor", + "quantile_sensor", + "moving_avg_sensor", + ], + ) + + # Subscribe to state changes AFTER building mapping + client.subscribe_states(on_state) + + # Find the publish button + publish_button = next( + (e for e in entities if "publish_values_button" in e.object_id.lower()), + None, + ) + assert publish_button is not None, "Publish Values Button not found" + + # Press the button to publish test values + client.button_command(publish_button.key) + + # Wait for all sensors to 
receive their final values + try: + await asyncio.wait_for( + asyncio.gather( + min_received, + max_received, + median_received, + quantile_received, + moving_avg_received, + ), + timeout=10.0, + ) + except TimeoutError: + # Provide detailed failure info + pytest.fail( + f"Timeout waiting for expected values. Received states:\n" + f" min: {sensor_states['min_sensor']}\n" + f" max: {sensor_states['max_sensor']}\n" + f" median: {sensor_states['median_sensor']}\n" + f" quantile: {sensor_states['quantile_sensor']}\n" + f" moving_avg: {sensor_states['moving_avg_sensor']}" + ) + + # Verify we got the expected values + # With batch_delay: 0ms, we should receive all outputs + # Filters output at positions 1 and 6 (send_every: 5) + assert len(sensor_states["min_sensor"]) == 2, ( + f"Min sensor should have 2 values, got {len(sensor_states['min_sensor'])}: {sensor_states['min_sensor']}" + ) + assert len(sensor_states["max_sensor"]) == 2, ( + f"Max sensor should have 2 values, got {len(sensor_states['max_sensor'])}: {sensor_states['max_sensor']}" + ) + assert len(sensor_states["median_sensor"]) == 2 + assert len(sensor_states["quantile_sensor"]) == 2 + assert len(sensor_states["moving_avg_sensor"]) == 2 + + # Verify the first output (after 1 value: [1]) + assert abs(sensor_states["min_sensor"][0] - 1.0) < 0.01, ( + f"First min should be 1.0, got {sensor_states['min_sensor'][0]}" + ) + assert abs(sensor_states["max_sensor"][0] - 1.0) < 0.01, ( + f"First max should be 1.0, got {sensor_states['max_sensor'][0]}" + ) + assert abs(sensor_states["median_sensor"][0] - 1.0) < 0.01, ( + f"First median should be 1.0, got {sensor_states['median_sensor'][0]}" + ) + assert abs(sensor_states["moving_avg_sensor"][0] - 1.0) < 0.01, ( + f"First moving avg should be 1.0, got {sensor_states['moving_avg_sensor'][0]}" + ) + + # Verify the second output (after 6 values, window has [2, 3, 4, 5, 6]) + assert abs(sensor_states["min_sensor"][1] - 2.0) < 0.01, ( + f"Second min should be 2.0, got {sensor_states['min_sensor'][1]}" + ) + assert abs(sensor_states["max_sensor"][1] - 6.0) < 0.01, ( + f"Second max should be 6.0, got {sensor_states['max_sensor'][1]}" + ) + assert abs(sensor_states["median_sensor"][1] - 4.0) < 0.01, ( + f"Second median should be 4.0, got {sensor_states['median_sensor'][1]}" + ) + assert abs(sensor_states["moving_avg_sensor"][1] - 4.0) < 0.01, ( + f"Second moving avg should be 4.0, got {sensor_states['moving_avg_sensor'][1]}" + ) + + +@pytest.mark.asyncio +async def test_sensor_filters_nan_handling( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test that sliding window filters handle NaN values correctly.""" + loop = asyncio.get_running_loop() + + # Track states + min_states: list[float] = [] + max_states: list[float] = [] + + # Future to track completion + filters_completed = loop.create_future() + + def on_state(state: EntityState) -> None: + """Track sensor state updates.""" + if not isinstance(state, SensorState): + return + + # Skip NaN values (initial states) + if state.missing_state: + return + + sensor_name = key_to_sensor.get(state.key) + if sensor_name == "min_nan": + min_states.append(state.state) + elif sensor_name == "max_nan": + max_states.append(state.state) + + # Check if both have received their final values + # With batch_delay: 0ms, we should receive 2 outputs each + if ( + len(min_states) >= 2 + and len(max_states) >= 2 + and not filters_completed.done() + ): + filters_completed.set_result(True) + + async 
with ( + run_compiled(yaml_config), + api_client_connected() as client, + ): + # Get entities first to build key mapping + entities, services = await client.list_entities_services() + + # Build key-to-sensor mapping + key_to_sensor = build_key_to_sensor_mapping(entities, ["min_nan", "max_nan"]) + + # Subscribe to state changes AFTER building mapping + client.subscribe_states(on_state) + + # Find the publish button + publish_button = next( + (e for e in entities if "publish_nan_values_button" in e.object_id.lower()), + None, + ) + assert publish_button is not None, "Publish NaN Values Button not found" + + # Press the button + client.button_command(publish_button.key) + + # Wait for filters to process + try: + await asyncio.wait_for(filters_completed, timeout=10.0) + except TimeoutError: + pytest.fail( + f"Timeout waiting for NaN handling. Received:\n" + f" min_states: {min_states}\n" + f" max_states: {max_states}" + ) + + # Verify NaN values were ignored + # With batch_delay: 0ms, we should receive both outputs (at positions 1 and 6) + # Position 1: window=[10], min=10, max=10 + # Position 6: window=[NaN, 5, NaN, 15, 8], ignoring NaN -> [5, 15, 8], min=5, max=15 + assert len(min_states) == 2, ( + f"Should have 2 min states, got {len(min_states)}: {min_states}" + ) + assert len(max_states) == 2, ( + f"Should have 2 max states, got {len(max_states)}: {max_states}" + ) + + # First output + assert abs(min_states[0] - 10.0) < 0.01, ( + f"First min should be 10.0, got {min_states[0]}" + ) + assert abs(max_states[0] - 10.0) < 0.01, ( + f"First max should be 10.0, got {max_states[0]}" + ) + + # Second output - verify NaN values were ignored + assert abs(min_states[1] - 5.0) < 0.01, ( + f"Second min should ignore NaN and return 5.0, got {min_states[1]}" + ) + assert abs(max_states[1] - 15.0) < 0.01, ( + f"Second max should ignore NaN and return 15.0, got {max_states[1]}" + ) + + +@pytest.mark.asyncio +async def test_sensor_filters_ring_buffer_wraparound( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test that ring buffer correctly wraps around when window fills up.""" + loop = asyncio.get_running_loop() + + min_states: list[float] = [] + + test_completed = loop.create_future() + + def on_state(state: EntityState) -> None: + """Track min sensor states.""" + if not isinstance(state, SensorState): + return + + # Skip NaN values (initial states) + if state.missing_state: + return + + sensor_name = key_to_sensor.get(state.key) + if sensor_name == "wraparound_min": + min_states.append(state.state) + # With batch_delay: 0ms, we should receive all 3 outputs + if len(min_states) >= 3 and not test_completed.done(): + test_completed.set_result(True) + + async with ( + run_compiled(yaml_config), + api_client_connected() as client, + ): + # Get entities first to build key mapping + entities, services = await client.list_entities_services() + + # Build key-to-sensor mapping + key_to_sensor = build_key_to_sensor_mapping(entities, ["wraparound_min"]) + + # Subscribe to state changes AFTER building mapping + client.subscribe_states(on_state) + + # Find the publish button + publish_button = next( + (e for e in entities if "publish_wraparound_button" in e.object_id.lower()), + None, + ) + assert publish_button is not None, "Publish Wraparound Button not found" + + # Press the button + # Will publish: 10, 20, 30, 5, 25, 15, 40, 35, 20 + client.button_command(publish_button.key) + + # Wait for completion + try: + await 
asyncio.wait_for(test_completed, timeout=10.0)
+        except TimeoutError:
+            pytest.fail(f"Timeout waiting for wraparound test. Received: {min_states}")
+
+        # Verify outputs
+        # With window_size=3, send_every=3, we get outputs at positions 1, 4, 7
+        # Position 1: window=[10], min=10
+        # Position 4: window=[20, 30, 5], min=5
+        # Position 7: window=[25, 15, 40], min=15
+        # With batch_delay: 0ms, we should receive all 3 outputs
+        assert len(min_states) == 3, (
+            f"Should have 3 states, got {len(min_states)}: {min_states}"
+        )
+        assert abs(min_states[0] - 10.0) < 0.01, (
+            f"First min should be 10.0, got {min_states[0]}"
+        )
+        assert abs(min_states[1] - 5.0) < 0.01, (
+            f"Second min should be 5.0, got {min_states[1]}"
+        )
+        assert abs(min_states[2] - 15.0) < 0.01, (
+            f"Third min should be 15.0, got {min_states[2]}"
+        )

From 447ee3da39ef1327693ac999c2c80c75fae90b05 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Wed, 15 Oct 2025 18:26:23 -1000
Subject: [PATCH 18/31] tests

---
 .../test_sensor_filters_sliding_window.py     | 78 ++++++++++---------
 1 file changed, 40 insertions(+), 38 deletions(-)

diff --git a/tests/integration/test_sensor_filters_sliding_window.py b/tests/integration/test_sensor_filters_sliding_window.py
index 943502c38f..0c7aec70aa 100644
--- a/tests/integration/test_sensor_filters_sliding_window.py
+++ b/tests/integration/test_sensor_filters_sliding_window.py
@@ -68,45 +68,47 @@ async def test_sensor_filters_sliding_window(
 
         # Get the sensor name from the key mapping
         sensor_name = key_to_sensor.get(state.key)
-        if sensor_name and sensor_name in sensor_states:
-            sensor_states[sensor_name].append(state.state)
+        if not sensor_name or sensor_name not in sensor_states:
+            return
 
-            # Check if we received the expected final value
-            # After publishing 10 values [1.0, 2.0, ..., 10.0], the window has the last 5: [2, 3, 4, 5, 6]
-            # Filters send at position 1 and position 6 (send_every=5 means every 5th value after first)
-            if (
-                sensor_name == "min_sensor"
-                and abs(state.state - 2.0) < 0.01
-                and not min_received.done()
-            ):
-                min_received.set_result(True)
-            elif (
-                sensor_name == "max_sensor"
-                and abs(state.state - 6.0) < 0.01
-                and not max_received.done()
-            ):
-                max_received.set_result(True)
-            elif (
-                sensor_name == "median_sensor"
-                and abs(state.state - 4.0) < 0.01
-                and not median_received.done()
-            ):
-                # Median of [2, 3, 4, 5, 6] = 4
-                median_received.set_result(True)
-            elif (
-                sensor_name == "quantile_sensor"
-                and abs(state.state - 6.0) < 0.01
-                and not quantile_received.done()
-            ):
-                # 90th percentile of [2, 3, 4, 5, 6] = 6
-                quantile_received.set_result(True)
-            elif (
-                sensor_name == "moving_avg_sensor"
-                and abs(state.state - 4.0) < 0.01
-                and not moving_avg_received.done()
-            ):
-                # Average of [2, 3, 4, 5, 6] = 4
-                moving_avg_received.set_result(True)
+
+        sensor_states[sensor_name].append(state.state)
+
+        # Check if we received the expected final value
+        # After publishing 10 values [1.0, 2.0, ..., 10.0], the window has the last 5: [2, 3, 4, 5, 6]
+        # Filters send at position 1 and position 6 (send_every=5 means every 5th value after first)
+        if (
+            sensor_name == "min_sensor"
+            and abs(state.state - 2.0) < 0.01
+            and not min_received.done()
+        ):
+            min_received.set_result(True)
+        elif (
+            sensor_name == "max_sensor"
+            and abs(state.state - 6.0) < 0.01
+            and not max_received.done()
+        ):
+            max_received.set_result(True)
+        elif (
+            sensor_name == "median_sensor"
+            and abs(state.state - 4.0) < 0.01
+            and not median_received.done()
+        ):
+            # Median of [2, 3, 4, 5, 6] = 4
+            
median_received.set_result(True) + elif ( + sensor_name == "quantile_sensor" + and abs(state.state - 6.0) < 0.01 + and not quantile_received.done() + ): + # 90th percentile of [2, 3, 4, 5, 6] = 6 + quantile_received.set_result(True) + elif ( + sensor_name == "moving_avg_sensor" + and abs(state.state - 4.0) < 0.01 + and not moving_avg_received.done() + ): + # Average of [2, 3, 4, 5, 6] = 4 + moving_avg_received.set_result(True) async with ( run_compiled(yaml_config), From a4b14902db2f0d7fdba63d866b456a77c68b8655 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 18:44:37 -1000 Subject: [PATCH 19/31] perf --- esphome/components/sensor/filter.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/esphome/components/sensor/filter.cpp b/esphome/components/sensor/filter.cpp index 0e52f9d94f..4863c00a29 100644 --- a/esphome/components/sensor/filter.cpp +++ b/esphome/components/sensor/filter.cpp @@ -53,11 +53,13 @@ optional SlidingWindowFilter::new_value(float value) { // Buffer not yet full - just append this->window_.push_back(value); this->window_count_++; - this->window_head_ = this->window_count_ % this->window_size_; } else { // Buffer full - overwrite oldest value (ring buffer) this->window_[this->window_head_] = value; - this->window_head_ = (this->window_head_ + 1) % this->window_size_; + this->window_head_++; + if (this->window_head_ >= this->window_size_) { + this->window_head_ = 0; + } } // Check if we should send a result From e3089ff0f6b7c015e10adc5ca7df05060177a647 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 19:21:33 -1000 Subject: [PATCH 20/31] tweak --- esphome/components/sensor/__init__.py | 42 +++++++++++++++ esphome/components/sensor/filter.cpp | 73 +++++++++++++++++++++++++ esphome/components/sensor/filter.h | 76 +++++++++++++++++++++++++++ 3 files changed, 191 insertions(+) diff --git a/esphome/components/sensor/__init__.py b/esphome/components/sensor/__init__.py index 2b99f68ac0..1585a6342f 100644 --- a/esphome/components/sensor/__init__.py +++ b/esphome/components/sensor/__init__.py @@ -249,6 +249,9 @@ MaxFilter = sensor_ns.class_("MaxFilter", Filter) SlidingWindowMovingAverageFilter = sensor_ns.class_( "SlidingWindowMovingAverageFilter", Filter ) +StreamingMinFilter = sensor_ns.class_("StreamingMinFilter", Filter) +StreamingMaxFilter = sensor_ns.class_("StreamingMaxFilter", Filter) +StreamingMovingAverageFilter = sensor_ns.class_("StreamingMovingAverageFilter", Filter) ExponentialMovingAverageFilter = sensor_ns.class_( "ExponentialMovingAverageFilter", Filter ) @@ -452,6 +455,19 @@ async def skip_initial_filter_to_code(config, filter_id): @FILTER_REGISTRY.register("min", MinFilter, MIN_SCHEMA) async def min_filter_to_code(config, filter_id): + window_size = config[CONF_WINDOW_SIZE] + send_every = config[CONF_SEND_EVERY] + send_first_at = config[CONF_SEND_FIRST_AT] + + # Optimization: Use streaming filter for batch windows (window_size == send_every) + # Saves 99.98% memory for large windows (e.g., 20KB → 4 bytes for window_size=5000) + if window_size == send_every: + return cg.new_Pvariable( + filter_id, + StreamingMinFilter, + window_size, + send_first_at, + ) return cg.new_Pvariable( filter_id, config[CONF_WINDOW_SIZE], @@ -474,6 +490,19 @@ MAX_SCHEMA = cv.All( @FILTER_REGISTRY.register("max", MaxFilter, MAX_SCHEMA) async def max_filter_to_code(config, filter_id): + window_size = config[CONF_WINDOW_SIZE] + send_every = config[CONF_SEND_EVERY] + send_first_at = config[CONF_SEND_FIRST_AT] + + # 
Optimization: Use streaming filter for batch windows (window_size == send_every)
+    # Saves 99.98% memory for large windows (e.g., 20KB → 4 bytes for window_size=5000)
+    if window_size == send_every:
+        return cg.new_Pvariable(
+            filter_id,
+            StreamingMaxFilter,
+            window_size,
+            send_first_at,
+        )
     return cg.new_Pvariable(
         filter_id,
         config[CONF_WINDOW_SIZE],
@@ -500,6 +529,19 @@ SLIDING_AVERAGE_SCHEMA = cv.All(
     SLIDING_AVERAGE_SCHEMA,
 )
 async def sliding_window_moving_average_filter_to_code(config, filter_id):
+    window_size = config[CONF_WINDOW_SIZE]
+    send_every = config[CONF_SEND_EVERY]
+    send_first_at = config[CONF_SEND_FIRST_AT]
+
+    # Optimization: Use streaming filter for batch windows (window_size == send_every)
+    # Saves 99.94% memory for large windows (e.g., 20KB → 12 bytes for window_size=5000)
+    if window_size == send_every:
+        return cg.new_Pvariable(
+            filter_id,
+            StreamingMovingAverageFilter,
+            window_size,
+            send_first_at,
+        )
     return cg.new_Pvariable(
         filter_id,
         config[CONF_WINDOW_SIZE],
diff --git a/esphome/components/sensor/filter.cpp b/esphome/components/sensor/filter.cpp
index 4863c00a29..c804125dcc 100644
--- a/esphome/components/sensor/filter.cpp
+++ b/esphome/components/sensor/filter.cpp
@@ -468,5 +468,78 @@ optional<float> ToNTCTemperatureFilter::new_value(float value) {
   return temp;
 }
 
+// StreamingFilter (base class)
+StreamingFilter::StreamingFilter(size_t window_size, size_t send_first_at)
+    : window_size_(window_size), send_first_at_(send_first_at) {}
+
+optional<float> StreamingFilter::new_value(float value) {
+  // Process the value (child class tracks min/max/sum/etc)
+  this->process_value(value);
+
+  this->count_++;
+
+  // Check if we should send (handle send_first_at for first value)
+  bool should_send = false;
+  if (this->first_send_ && this->count_ >= this->send_first_at_) {
+    should_send = true;
+    this->first_send_ = false;
+  } else if (!this->first_send_ && this->count_ >= this->window_size_) {
+    should_send = true;
+  }
+
+  if (should_send) {
+    float result = this->compute_batch_result();
+    // Reset for next batch
+    this->count_ = 0;
+    this->reset_batch();
+    ESP_LOGVV(TAG, "StreamingFilter(%p)::new_value(%f) SENDING %f", this, value, result);
+    return result;
+  }
+
+  return {};
+}
+
+// StreamingMinFilter
+void StreamingMinFilter::process_value(float value) {
+  // Update running minimum (ignore NaN values)
+  if (!std::isnan(value)) {
+    this->current_min_ = std::isnan(this->current_min_) ? value : std::min(this->current_min_, value);
+  }
+}
+
+float StreamingMinFilter::compute_batch_result() { return this->current_min_; }
+
+void StreamingMinFilter::reset_batch() { this->current_min_ = NAN; }
+
+// StreamingMaxFilter
+void StreamingMaxFilter::process_value(float value) {
+  // Update running maximum (ignore NaN values)
+  if (!std::isnan(value)) {
+    this->current_max_ = std::isnan(this->current_max_) ? value : std::max(this->current_max_, value);
+  }
+}
+
+float StreamingMaxFilter::compute_batch_result() { return this->current_max_; }
+
+void StreamingMaxFilter::reset_batch() { this->current_max_ = NAN; }
+
+// StreamingMovingAverageFilter
+void StreamingMovingAverageFilter::process_value(float value) {
+  // Accumulate sum (ignore NaN values)
+  if (!std::isnan(value)) {
+    this->sum_ += value;
+    this->valid_count_++;
+  }
+}
+
+float StreamingMovingAverageFilter::compute_batch_result() {
+  return this->valid_count_ > 0 ? this->sum_ / this->valid_count_ : NAN;
+}
+
+void StreamingMovingAverageFilter::reset_batch() {
+  this->sum_ = 0.0f;
+  this->valid_count_ = 0;
+}
+
 }  // namespace sensor
 }  // namespace esphome
diff --git a/esphome/components/sensor/filter.h b/esphome/components/sensor/filter.h
index b391048521..c9b39b73c3 100644
--- a/esphome/components/sensor/filter.h
+++ b/esphome/components/sensor/filter.h
@@ -504,5 +504,81 @@ class ToNTCTemperatureFilter : public Filter {
   double c_;
 };
 
+/** Base class for streaming filters (batch windows where window_size == send_every).
+ *
+ * When window_size equals send_every, we don't need a sliding window.
+ * This base class handles the common batching logic.
+ */
+class StreamingFilter : public Filter {
+ public:
+  StreamingFilter(size_t window_size, size_t send_first_at);
+
+  optional<float> new_value(float value) final;
+
+ protected:
+  /// Called by new_value() to process each value in the batch
+  virtual void process_value(float value) = 0;
+
+  /// Called by new_value() to compute the result after collecting window_size values
+  virtual float compute_batch_result() = 0;
+
+  /// Called by new_value() to reset internal state after sending a result
+  virtual void reset_batch() = 0;
+
+  size_t window_size_;
+  size_t count_{0};
+  size_t send_first_at_;
+  bool first_send_{true};
+};
+
+/** Streaming min filter for batch windows (window_size == send_every).
+ *
+ * Uses O(1) memory instead of O(n) by tracking only the minimum value.
+ */
+class StreamingMinFilter : public StreamingFilter {
+ public:
+  using StreamingFilter::StreamingFilter;
+
+ protected:
+  void process_value(float value) override;
+  float compute_batch_result() override;
+  void reset_batch() override;
+
+  float current_min_{NAN};
+};
+
+/** Streaming max filter for batch windows (window_size == send_every).
+ *
+ * Uses O(1) memory instead of O(n) by tracking only the maximum value.
+ */
+class StreamingMaxFilter : public StreamingFilter {
+ public:
+  using StreamingFilter::StreamingFilter;
+
+ protected:
+  void process_value(float value) override;
+  float compute_batch_result() override;
+  void reset_batch() override;
+
+  float current_max_{NAN};
+};
+
+/** Streaming moving average filter for batch windows (window_size == send_every).
+ *
+ * Uses O(1) memory instead of O(n) by tracking only sum and count.
+ */
+class StreamingMovingAverageFilter : public StreamingFilter {
+ public:
+  using StreamingFilter::StreamingFilter;
+
+ protected:
+  void process_value(float value) override;
+  float compute_batch_result() override;
+  void reset_batch() override;
+
+  float sum_{0.0f};
+  size_t valid_count_{0};
+};
+
 }  // namespace sensor
 }  // namespace esphome

From a72c494b758389b145191d5b1827e705d0c8e1ea Mon Sep 17 00:00:00 2001
From: "J.
Nick Koston" Date: Wed, 15 Oct 2025 19:23:01 -1000 Subject: [PATCH 21/31] tweak --- esphome/components/sensor/__init__.py | 68 ++++++++------------------- 1 file changed, 20 insertions(+), 48 deletions(-) diff --git a/esphome/components/sensor/__init__.py b/esphome/components/sensor/__init__.py index 1585a6342f..ec5bf1364d 100644 --- a/esphome/components/sensor/__init__.py +++ b/esphome/components/sensor/__init__.py @@ -453,8 +453,12 @@ async def skip_initial_filter_to_code(config, filter_id): return cg.new_Pvariable(filter_id, config) -@FILTER_REGISTRY.register("min", MinFilter, MIN_SCHEMA) -async def min_filter_to_code(config, filter_id): +def _create_sliding_window_filter(config, filter_id, sliding_class, streaming_class): + """Helper to create sliding window or streaming filter based on config. + + When window_size == send_every, use streaming filter (O(1) memory). + Otherwise, use sliding window filter (O(n) memory). + """ window_size = config[CONF_WINDOW_SIZE] send_every = config[CONF_SEND_EVERY] send_first_at = config[CONF_SEND_FIRST_AT] @@ -462,17 +466,14 @@ async def min_filter_to_code(config, filter_id): # Optimization: Use streaming filter for batch windows (window_size == send_every) # Saves 99.98% memory for large windows (e.g., 20KB → 4 bytes for window_size=5000) if window_size == send_every: - return cg.new_Pvariable( - filter_id, - StreamingMinFilter, - window_size, - send_first_at, - ) - return cg.new_Pvariable( - filter_id, - config[CONF_WINDOW_SIZE], - config[CONF_SEND_EVERY], - config[CONF_SEND_FIRST_AT], + return cg.new_Pvariable(filter_id, streaming_class, window_size, send_first_at) + return cg.new_Pvariable(filter_id, window_size, send_every, send_first_at) + + +@FILTER_REGISTRY.register("min", MinFilter, MIN_SCHEMA) +async def min_filter_to_code(config, filter_id): + return _create_sliding_window_filter( + config, filter_id, MinFilter, StreamingMinFilter ) @@ -490,24 +491,8 @@ MAX_SCHEMA = cv.All( @FILTER_REGISTRY.register("max", MaxFilter, MAX_SCHEMA) async def max_filter_to_code(config, filter_id): - window_size = config[CONF_WINDOW_SIZE] - send_every = config[CONF_SEND_EVERY] - send_first_at = config[CONF_SEND_FIRST_AT] - - # Optimization: Use streaming filter for batch windows (window_size == send_every) - # Saves 99.98% memory for large windows (e.g., 20KB → 4 bytes for window_size=5000) - if window_size == send_every: - return cg.new_Pvariable( - filter_id, - StreamingMaxFilter, - window_size, - send_first_at, - ) - return cg.new_Pvariable( - filter_id, - config[CONF_WINDOW_SIZE], - config[CONF_SEND_EVERY], - config[CONF_SEND_FIRST_AT], + return _create_sliding_window_filter( + config, filter_id, MaxFilter, StreamingMaxFilter ) @@ -529,24 +514,11 @@ SLIDING_AVERAGE_SCHEMA = cv.All( SLIDING_AVERAGE_SCHEMA, ) async def sliding_window_moving_average_filter_to_code(config, filter_id): - window_size = config[CONF_WINDOW_SIZE] - send_every = config[CONF_SEND_EVERY] - send_first_at = config[CONF_SEND_FIRST_AT] - - # Optimization: Use streaming filter for batch windows (window_size == send_every) - # Saves 99.94% memory for large windows (e.g., 20KB → 12 bytes for window_size=5000) - if window_size == send_every: - return cg.new_Pvariable( - filter_id, - StreamingMovingAverageFilter, - window_size, - send_first_at, - ) - return cg.new_Pvariable( + return _create_sliding_window_filter( + config, filter_id, - config[CONF_WINDOW_SIZE], - config[CONF_SEND_EVERY], - config[CONF_SEND_FIRST_AT], + SlidingWindowMovingAverageFilter, + StreamingMovingAverageFilter, ) From 
5a8558e1c557cd4308264d9d18d027d464886791 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 19:23:35 -1000 Subject: [PATCH 22/31] tweak --- esphome/components/sensor/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/esphome/components/sensor/__init__.py b/esphome/components/sensor/__init__.py index ec5bf1364d..738ccf2ac6 100644 --- a/esphome/components/sensor/__init__.py +++ b/esphome/components/sensor/__init__.py @@ -467,7 +467,9 @@ def _create_sliding_window_filter(config, filter_id, sliding_class, streaming_cl # Saves 99.98% memory for large windows (e.g., 20KB → 4 bytes for window_size=5000) if window_size == send_every: return cg.new_Pvariable(filter_id, streaming_class, window_size, send_first_at) - return cg.new_Pvariable(filter_id, window_size, send_every, send_first_at) + return cg.new_Pvariable( + filter_id, sliding_class, window_size, send_every, send_first_at + ) @FILTER_REGISTRY.register("min", MinFilter, MIN_SCHEMA) From 589c25e65a3be2bf8d192fd8c580357d4e2241be Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 19:24:44 -1000 Subject: [PATCH 23/31] tweak --- esphome/components/sensor/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/esphome/components/sensor/__init__.py b/esphome/components/sensor/__init__.py index 738ccf2ac6..b2c81f6239 100644 --- a/esphome/components/sensor/__init__.py +++ b/esphome/components/sensor/__init__.py @@ -472,7 +472,7 @@ def _create_sliding_window_filter(config, filter_id, sliding_class, streaming_cl ) -@FILTER_REGISTRY.register("min", MinFilter, MIN_SCHEMA) +@FILTER_REGISTRY.register("min", Filter, MIN_SCHEMA) async def min_filter_to_code(config, filter_id): return _create_sliding_window_filter( config, filter_id, MinFilter, StreamingMinFilter @@ -491,7 +491,7 @@ MAX_SCHEMA = cv.All( ) -@FILTER_REGISTRY.register("max", MaxFilter, MAX_SCHEMA) +@FILTER_REGISTRY.register("max", Filter, MAX_SCHEMA) async def max_filter_to_code(config, filter_id): return _create_sliding_window_filter( config, filter_id, MaxFilter, StreamingMaxFilter @@ -512,7 +512,7 @@ SLIDING_AVERAGE_SCHEMA = cv.All( @FILTER_REGISTRY.register( "sliding_window_moving_average", - SlidingWindowMovingAverageFilter, + Filter, SLIDING_AVERAGE_SCHEMA, ) async def sliding_window_moving_average_filter_to_code(config, filter_id): From 92d54ffb09131521432ffa6e22be9b81c10cb6e7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 19:28:51 -1000 Subject: [PATCH 24/31] tweak --- esphome/components/sensor/__init__.py | 60 +++++++++++++-------------- 1 file changed, 29 insertions(+), 31 deletions(-) diff --git a/esphome/components/sensor/__init__.py b/esphome/components/sensor/__init__.py index b2c81f6239..feb7d0374d 100644 --- a/esphome/components/sensor/__init__.py +++ b/esphome/components/sensor/__init__.py @@ -453,30 +453,19 @@ async def skip_initial_filter_to_code(config, filter_id): return cg.new_Pvariable(filter_id, config) -def _create_sliding_window_filter(config, filter_id, sliding_class, streaming_class): - """Helper to create sliding window or streaming filter based on config. - - When window_size == send_every, use streaming filter (O(1) memory). - Otherwise, use sliding window filter (O(n) memory). 
- """ - window_size = config[CONF_WINDOW_SIZE] - send_every = config[CONF_SEND_EVERY] - send_first_at = config[CONF_SEND_FIRST_AT] +@FILTER_REGISTRY.register("min", MinFilter, MIN_SCHEMA) +async def min_filter_to_code(config, filter_id): + window_size: int = config[CONF_WINDOW_SIZE] + send_every: int = config[CONF_SEND_EVERY] + send_first_at: int = config[CONF_SEND_FIRST_AT] # Optimization: Use streaming filter for batch windows (window_size == send_every) # Saves 99.98% memory for large windows (e.g., 20KB → 4 bytes for window_size=5000) if window_size == send_every: - return cg.new_Pvariable(filter_id, streaming_class, window_size, send_first_at) - return cg.new_Pvariable( - filter_id, sliding_class, window_size, send_every, send_first_at - ) - - -@FILTER_REGISTRY.register("min", Filter, MIN_SCHEMA) -async def min_filter_to_code(config, filter_id): - return _create_sliding_window_filter( - config, filter_id, MinFilter, StreamingMinFilter - ) + # Use streaming filter - O(1) memory instead of O(n) + return cg.Pvariable(filter_id, StreamingMinFilter, window_size, send_first_at) + # Use sliding window filter - maintains ring buffer + return cg.new_Pvariable(filter_id, window_size, send_every, send_first_at) MAX_SCHEMA = cv.All( @@ -491,11 +480,16 @@ MAX_SCHEMA = cv.All( ) -@FILTER_REGISTRY.register("max", Filter, MAX_SCHEMA) +@FILTER_REGISTRY.register("max", MaxFilter, MAX_SCHEMA) async def max_filter_to_code(config, filter_id): - return _create_sliding_window_filter( - config, filter_id, MaxFilter, StreamingMaxFilter - ) + window_size: int = config[CONF_WINDOW_SIZE] + send_every: int = config[CONF_SEND_EVERY] + send_first_at: int = config[CONF_SEND_FIRST_AT] + + # Optimization: Use streaming filter for batch windows (window_size == send_every) + if window_size == send_every: + return cg.Pvariable(filter_id, StreamingMaxFilter, window_size, send_first_at) + return cg.new_Pvariable(filter_id, window_size, send_every, send_first_at) SLIDING_AVERAGE_SCHEMA = cv.All( @@ -512,16 +506,20 @@ SLIDING_AVERAGE_SCHEMA = cv.All( @FILTER_REGISTRY.register( "sliding_window_moving_average", - Filter, + SlidingWindowMovingAverageFilter, SLIDING_AVERAGE_SCHEMA, ) async def sliding_window_moving_average_filter_to_code(config, filter_id): - return _create_sliding_window_filter( - config, - filter_id, - SlidingWindowMovingAverageFilter, - StreamingMovingAverageFilter, - ) + window_size: int = config[CONF_WINDOW_SIZE] + send_every: int = config[CONF_SEND_EVERY] + send_first_at: int = config[CONF_SEND_FIRST_AT] + + # Optimization: Use streaming filter for batch windows (window_size == send_every) + if window_size == send_every: + return cg.Pvariable( + filter_id, StreamingMovingAverageFilter, window_size, send_first_at + ) + return cg.new_Pvariable(filter_id, window_size, send_every, send_first_at) EXPONENTIAL_AVERAGE_SCHEMA = cv.All( From a999349fa5726720ffd0a8e3f098ad937ba4d17f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 15 Oct 2025 19:29:55 -1000 Subject: [PATCH 25/31] tweak --- esphome/components/sensor/__init__.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/esphome/components/sensor/__init__.py b/esphome/components/sensor/__init__.py index feb7d0374d..a7a92d3968 100644 --- a/esphome/components/sensor/__init__.py +++ b/esphome/components/sensor/__init__.py @@ -463,7 +463,8 @@ async def min_filter_to_code(config, filter_id): # Saves 99.98% memory for large windows (e.g., 20KB → 4 bytes for window_size=5000) if window_size == send_every: # Use streaming filter - O(1) memory instead of O(n) - return cg.Pvariable(filter_id, StreamingMinFilter, window_size, send_first_at) + rhs = cg.new_Pvariable(StreamingMinFilter, window_size, send_first_at) + return cg.Pvariable(filter_id, rhs) # Use sliding window filter - maintains ring buffer return cg.new_Pvariable(filter_id, window_size, send_every, send_first_at) @@ -488,7 +489,8 @@ async def max_filter_to_code(config, filter_id): # Optimization: Use streaming filter for batch windows (window_size == send_every) if window_size == send_every: - return cg.Pvariable(filter_id, StreamingMaxFilter, window_size, send_first_at) + rhs = cg.new_Pvariable(StreamingMaxFilter, window_size, send_first_at) + return cg.Pvariable(filter_id, rhs) return cg.new_Pvariable(filter_id, window_size, send_every, send_first_at) @@ -516,9 +518,8 @@ async def sliding_window_moving_average_filter_to_code(config, filter_id): # Optimization: Use streaming filter for batch windows (window_size == send_every) if window_size == send_every: - return cg.Pvariable( - filter_id, StreamingMovingAverageFilter, window_size, send_first_at - ) + rhs = cg.new_Pvariable(StreamingMovingAverageFilter, window_size, send_first_at) + return cg.Pvariable(filter_id, rhs) return cg.new_Pvariable(filter_id, window_size, send_every, send_first_at) From f75f11b550b50089c57b25c1f939da3457ec9744 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 15 Oct 2025 19:57:29 -1000 Subject: [PATCH 26/31] add --- esphome/components/sensor/__init__.py | 27 ++++--- .../fixtures/sensor_filters_batch_window.yaml | 58 ++++++++++++++ .../fixtures/sensor_filters_ring_buffer.yaml | 75 +++++++++++++++++++ 3 files changed, 148 insertions(+), 12 deletions(-) create mode 100644 tests/integration/fixtures/sensor_filters_batch_window.yaml create mode 100644 tests/integration/fixtures/sensor_filters_ring_buffer.yaml diff --git a/esphome/components/sensor/__init__.py b/esphome/components/sensor/__init__.py index a7a92d3968..0538531354 100644 --- a/esphome/components/sensor/__init__.py +++ b/esphome/components/sensor/__init__.py @@ -453,7 +453,7 @@ async def skip_initial_filter_to_code(config, filter_id): return cg.new_Pvariable(filter_id, config) -@FILTER_REGISTRY.register("min", MinFilter, MIN_SCHEMA) +@FILTER_REGISTRY.register("min", Filter, MIN_SCHEMA) async def min_filter_to_code(config, filter_id): window_size: int = config[CONF_WINDOW_SIZE] send_every: int = config[CONF_SEND_EVERY] @@ -463,10 +463,11 @@ async def min_filter_to_code(config, filter_id): # Saves 99.98% memory for large windows (e.g., 20KB → 4 bytes for window_size=5000) if window_size == send_every: # Use streaming filter - O(1) memory instead of O(n) - rhs = cg.new_Pvariable(StreamingMinFilter, window_size, send_first_at) - return cg.Pvariable(filter_id, rhs) + rhs = StreamingMinFilter.new(window_size, send_first_at) + return cg.Pvariable(filter_id, rhs, StreamingMinFilter) # Use sliding window filter - maintains ring buffer - return cg.new_Pvariable(filter_id, window_size, send_every, send_first_at) + rhs = MinFilter.new(window_size, send_every, send_first_at) + return cg.Pvariable(filter_id, rhs, MinFilter) MAX_SCHEMA = cv.All( @@ -481,7 +482,7 @@ MAX_SCHEMA = cv.All( ) -@FILTER_REGISTRY.register("max", MaxFilter, MAX_SCHEMA) +@FILTER_REGISTRY.register("max", Filter, MAX_SCHEMA) async def max_filter_to_code(config, filter_id): window_size: int = config[CONF_WINDOW_SIZE] send_every: int = config[CONF_SEND_EVERY] @@ -489,9 +490,10 @@ async def max_filter_to_code(config, filter_id): # Optimization: Use streaming filter for batch windows (window_size == send_every) if window_size == send_every: - rhs = cg.new_Pvariable(StreamingMaxFilter, window_size, send_first_at) - return cg.Pvariable(filter_id, rhs) - return cg.new_Pvariable(filter_id, window_size, send_every, send_first_at) + rhs = StreamingMaxFilter.new(window_size, send_first_at) + return cg.Pvariable(filter_id, rhs, StreamingMaxFilter) + rhs = MaxFilter.new(window_size, send_every, send_first_at) + return cg.Pvariable(filter_id, rhs, MaxFilter) SLIDING_AVERAGE_SCHEMA = cv.All( @@ -508,7 +510,7 @@ SLIDING_AVERAGE_SCHEMA = cv.All( @FILTER_REGISTRY.register( "sliding_window_moving_average", - SlidingWindowMovingAverageFilter, + Filter, SLIDING_AVERAGE_SCHEMA, ) async def sliding_window_moving_average_filter_to_code(config, filter_id): @@ -518,9 +520,10 @@ async def sliding_window_moving_average_filter_to_code(config, filter_id): # Optimization: Use streaming filter for batch windows (window_size == send_every) if window_size == send_every: - rhs = cg.new_Pvariable(StreamingMovingAverageFilter, window_size, send_first_at) - return cg.Pvariable(filter_id, rhs) - return cg.new_Pvariable(filter_id, window_size, send_every, send_first_at) + rhs = StreamingMovingAverageFilter.new(window_size, send_first_at) + return cg.Pvariable(filter_id, rhs, StreamingMovingAverageFilter) + rhs = 
SlidingWindowMovingAverageFilter.new(window_size, send_every, send_first_at) + return cg.Pvariable(filter_id, rhs, SlidingWindowMovingAverageFilter) EXPONENTIAL_AVERAGE_SCHEMA = cv.All( diff --git a/tests/integration/fixtures/sensor_filters_batch_window.yaml b/tests/integration/fixtures/sensor_filters_batch_window.yaml new file mode 100644 index 0000000000..58a254c215 --- /dev/null +++ b/tests/integration/fixtures/sensor_filters_batch_window.yaml @@ -0,0 +1,58 @@ +esphome: + name: test-batch-window-filters + +host: +api: + batch_delay: 0ms # Disable batching to receive all state updates +logger: + level: DEBUG + +# Template sensor that we'll use to publish values +sensor: + - platform: template + name: "Source Sensor" + id: source_sensor + accuracy_decimals: 2 + + # Batch window filters (window_size == send_every) - use streaming filters + - platform: copy + source_id: source_sensor + name: "Min Sensor" + id: min_sensor + filters: + - min: + window_size: 5 + send_every: 5 + send_first_at: 1 + + - platform: copy + source_id: source_sensor + name: "Max Sensor" + id: max_sensor + filters: + - max: + window_size: 5 + send_every: 5 + send_first_at: 1 + + - platform: copy + source_id: source_sensor + name: "Moving Avg Sensor" + id: moving_avg_sensor + filters: + - sliding_window_moving_average: + window_size: 5 + send_every: 5 + send_first_at: 1 + +# Button to trigger publishing test values +button: + - platform: template + name: "Publish Values Button" + id: publish_button + on_press: + - lambda: |- + // Publish 10 values: 1.0, 2.0, ..., 10.0 + for (int i = 1; i <= 10; i++) { + id(source_sensor).publish_state(float(i)); + } diff --git a/tests/integration/fixtures/sensor_filters_ring_buffer.yaml b/tests/integration/fixtures/sensor_filters_ring_buffer.yaml new file mode 100644 index 0000000000..0d603ee9ce --- /dev/null +++ b/tests/integration/fixtures/sensor_filters_ring_buffer.yaml @@ -0,0 +1,75 @@ +esphome: + name: test-sliding-window-filters + +host: +api: + batch_delay: 0ms # Disable batching to receive all state updates +logger: + level: DEBUG + +# Template sensor that we'll use to publish values +sensor: + - platform: template + name: "Source Sensor" + id: source_sensor + accuracy_decimals: 2 + + # ACTUAL sliding window filters (window_size != send_every) - use ring buffers + # Window of 5, send every 2 values + - platform: copy + source_id: source_sensor + name: "Sliding Min Sensor" + id: sliding_min_sensor + filters: + - min: + window_size: 5 + send_every: 2 + send_first_at: 1 + + - platform: copy + source_id: source_sensor + name: "Sliding Max Sensor" + id: sliding_max_sensor + filters: + - max: + window_size: 5 + send_every: 2 + send_first_at: 1 + + - platform: copy + source_id: source_sensor + name: "Sliding Median Sensor" + id: sliding_median_sensor + filters: + - median: + window_size: 5 + send_every: 2 + send_first_at: 1 + + - platform: copy + source_id: source_sensor + name: "Sliding Moving Avg Sensor" + id: sliding_moving_avg_sensor + filters: + - sliding_window_moving_average: + window_size: 5 + send_every: 2 + send_first_at: 1 + +# Button to trigger publishing test values +button: + - platform: template + name: "Publish Values Button" + id: publish_button + on_press: + - lambda: |- + // Publish 10 values: 1.0, 2.0, ..., 10.0 + // With window_size=5, send_every=2, send_first_at=1: + // - Output at position 1: window=[1], min=1, max=1, median=1, avg=1 + // - Output at position 3: window=[1,2,3], min=1, max=3, median=2, avg=2 + // - Output at position 5: window=[1,2,3,4,5], min=1, 
max=5, median=3, avg=3 + // - Output at position 7: window=[3,4,5,6,7], min=3, max=7, median=5, avg=5 + // - Output at position 9: window=[5,6,7,8,9], min=5, max=9, median=7, avg=7 + for (int i = 1; i <= 10; i++) { + id(source_sensor).publish_state(float(i)); + } From 855df423ee8c03d30d9a0d5430edbb09c59e5bd7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 19:58:18 -1000 Subject: [PATCH 27/31] add --- .../test_sensor_filters_ring_buffer.py | 163 ++++++++++++++++++ 1 file changed, 163 insertions(+) create mode 100644 tests/integration/test_sensor_filters_ring_buffer.py diff --git a/tests/integration/test_sensor_filters_ring_buffer.py b/tests/integration/test_sensor_filters_ring_buffer.py new file mode 100644 index 0000000000..e138f93e7e --- /dev/null +++ b/tests/integration/test_sensor_filters_ring_buffer.py @@ -0,0 +1,163 @@ +"""Test sensor ring buffer filter functionality (window_size != send_every).""" + +from __future__ import annotations + +import asyncio + +from aioesphomeapi import EntityInfo, EntityState, SensorState +import pytest + +from .types import APIClientConnectedFactory, RunCompiledFunction + + +def build_key_to_sensor_mapping( + entities: list[EntityInfo], sensor_names: list[str] +) -> dict[int, str]: + """Build a mapping from entity keys to sensor names. + + Args: + entities: List of entity info objects from the API + sensor_names: List of sensor names to search for in object_ids + + Returns: + Dictionary mapping entity keys to sensor names + """ + key_to_sensor: dict[int, str] = {} + for entity in entities: + obj_id = entity.object_id.lower() + for sensor_name in sensor_names: + if sensor_name in obj_id: + key_to_sensor[entity.key] = sensor_name + break + return key_to_sensor + + +@pytest.mark.asyncio +async def test_sensor_filters_ring_buffer( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test that ring buffer filters (window_size != send_every) work correctly.""" + loop = asyncio.get_running_loop() + + # Track state changes for each sensor + sensor_states: dict[str, list[float]] = { + "sliding_min": [], + "sliding_max": [], + "sliding_median": [], + "sliding_moving_avg": [], + } + + # Futures to track when we receive expected values + all_updates_received = loop.create_future() + + def on_state(state: EntityState) -> None: + """Track sensor state updates.""" + if not isinstance(state, SensorState): + return + + # Skip NaN values (initial states) + if state.missing_state: + return + + # Get the sensor name from the key mapping + sensor_name = key_to_sensor.get(state.key) + if not sensor_name or sensor_name not in sensor_states: + return + + sensor_states[sensor_name].append(state.state) + + # Check if we've received enough updates from all sensors + # With send_every=2, send_first_at=1, we expect 5 outputs per sensor + if ( + len(sensor_states["sliding_min"]) >= 5 + and len(sensor_states["sliding_max"]) >= 5 + and len(sensor_states["sliding_median"]) >= 5 + and len(sensor_states["sliding_moving_avg"]) >= 5 + and not all_updates_received.done() + ): + all_updates_received.set_result(True) + + async with ( + run_compiled(yaml_config), + api_client_connected() as client, + ): + # Get entities first to build key mapping + entities, services = await client.list_entities_services() + + # Build key-to-sensor mapping + key_to_sensor = build_key_to_sensor_mapping( + entities, + [ + "sliding_min", + "sliding_max", + "sliding_median", + "sliding_moving_avg", + ], + ) + + # 
Subscribe to state changes AFTER building mapping + client.subscribe_states(on_state) + + # Find the publish button + publish_button = next( + (e for e in entities if "publish_values_button" in e.object_id.lower()), + None, + ) + assert publish_button is not None, "Publish Values Button not found" + + # Press the button to publish test values + client.button_command(publish_button.key) + + # Wait for all sensors to receive their values + try: + await asyncio.wait_for(all_updates_received, timeout=10.0) + except TimeoutError: + # Provide detailed failure info + pytest.fail( + f"Timeout waiting for updates. Received states:\n" + f" min: {sensor_states['sliding_min']}\n" + f" max: {sensor_states['sliding_max']}\n" + f" median: {sensor_states['sliding_median']}\n" + f" moving_avg: {sensor_states['sliding_moving_avg']}" + ) + + # Verify we got 5 outputs per sensor (positions 1, 3, 5, 7, 9) + assert len(sensor_states["sliding_min"]) == 5, ( + f"Min sensor should have 5 values, got {len(sensor_states['sliding_min'])}: {sensor_states['sliding_min']}" + ) + assert len(sensor_states["sliding_max"]) == 5 + assert len(sensor_states["sliding_median"]) == 5 + assert len(sensor_states["sliding_moving_avg"]) == 5 + + # Verify the values at each output position + # Position 1: window=[1] + assert abs(sensor_states["sliding_min"][0] - 1.0) < 0.01 + assert abs(sensor_states["sliding_max"][0] - 1.0) < 0.01 + assert abs(sensor_states["sliding_median"][0] - 1.0) < 0.01 + assert abs(sensor_states["sliding_moving_avg"][0] - 1.0) < 0.01 + + # Position 3: window=[1,2,3] + assert abs(sensor_states["sliding_min"][1] - 1.0) < 0.01 + assert abs(sensor_states["sliding_max"][1] - 3.0) < 0.01 + assert abs(sensor_states["sliding_median"][1] - 2.0) < 0.01 + assert abs(sensor_states["sliding_moving_avg"][1] - 2.0) < 0.01 + + # Position 5: window=[1,2,3,4,5] + assert abs(sensor_states["sliding_min"][2] - 1.0) < 0.01 + assert abs(sensor_states["sliding_max"][2] - 5.0) < 0.01 + assert abs(sensor_states["sliding_median"][2] - 3.0) < 0.01 + assert abs(sensor_states["sliding_moving_avg"][2] - 3.0) < 0.01 + + # Position 7: window=[3,4,5,6,7] (ring buffer wrapped) + assert abs(sensor_states["sliding_min"][3] - 3.0) < 0.01 + assert abs(sensor_states["sliding_max"][3] - 7.0) < 0.01 + assert abs(sensor_states["sliding_median"][3] - 5.0) < 0.01 + assert abs(sensor_states["sliding_moving_avg"][3] - 5.0) < 0.01 + + # Position 9: window=[5,6,7,8,9] (ring buffer wrapped) + assert abs(sensor_states["sliding_min"][4] - 5.0) < 0.01 + assert abs(sensor_states["sliding_max"][4] - 9.0) < 0.01 + assert abs(sensor_states["sliding_median"][4] - 7.0) < 0.01 + assert abs(sensor_states["sliding_moving_avg"][4] - 7.0) < 0.01 From 784183ca8d25cf1be8129824755f9fff55d7764e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 15 Oct 2025 21:38:02 -1000 Subject: [PATCH 28/31] [datetime] Fix DateTimeStateTrigger compilation when time component is not used --- esphome/components/datetime/datetime_base.h | 2 -- 1 file changed, 2 deletions(-) diff --git a/esphome/components/datetime/datetime_base.h b/esphome/components/datetime/datetime_base.h index b7645f5539..b5f54ac96f 100644 --- a/esphome/components/datetime/datetime_base.h +++ b/esphome/components/datetime/datetime_base.h @@ -30,14 +30,12 @@ class DateTimeBase : public EntityBase { #endif }; -#ifdef USE_TIME class DateTimeStateTrigger : public Trigger { public: explicit DateTimeStateTrigger(DateTimeBase *parent) { parent->add_on_state_callback([this, parent]() { this->trigger(parent->state_as_esptime()); }); } }; -#endif } // namespace datetime } // namespace esphome From 7027ae983319c24071b8cfc3c3b0b62c6beddc7d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 21:44:38 -1000 Subject: [PATCH 29/31] race --- .../fixtures/sensor_filters_nan_handling.yaml | 11 +++++++++++ .../fixtures/sensor_filters_ring_buffer.yaml | 2 ++ .../sensor_filters_ring_buffer_wraparound.yaml | 2 ++ .../fixtures/sensor_filters_sliding_window.yaml | 2 ++ 4 files changed, 17 insertions(+) diff --git a/tests/integration/fixtures/sensor_filters_nan_handling.yaml b/tests/integration/fixtures/sensor_filters_nan_handling.yaml index 20fae64e8b..5fc3d1db29 100644 --- a/tests/integration/fixtures/sensor_filters_nan_handling.yaml +++ b/tests/integration/fixtures/sensor_filters_nan_handling.yaml @@ -38,13 +38,24 @@ button: on_press: - lambda: |- // Publish 10 values with NaN mixed in: 10, NaN, 5, NaN, 15, 8, NaN, 12, 3, NaN + // Small delay to ensure API can process each state update id(source_nan_sensor).publish_state(10.0); + delay(10); id(source_nan_sensor).publish_state(NAN); + delay(10); id(source_nan_sensor).publish_state(5.0); + delay(10); id(source_nan_sensor).publish_state(NAN); + delay(10); id(source_nan_sensor).publish_state(15.0); + delay(10); id(source_nan_sensor).publish_state(8.0); + delay(10); id(source_nan_sensor).publish_state(NAN); + delay(10); id(source_nan_sensor).publish_state(12.0); + delay(10); id(source_nan_sensor).publish_state(3.0); + delay(10); id(source_nan_sensor).publish_state(NAN); + delay(10); diff --git a/tests/integration/fixtures/sensor_filters_ring_buffer.yaml b/tests/integration/fixtures/sensor_filters_ring_buffer.yaml index 0d603ee9ce..fb502dc999 100644 --- a/tests/integration/fixtures/sensor_filters_ring_buffer.yaml +++ b/tests/integration/fixtures/sensor_filters_ring_buffer.yaml @@ -70,6 +70,8 @@ button: // - Output at position 5: window=[1,2,3,4,5], min=1, max=5, median=3, avg=3 // - Output at position 7: window=[3,4,5,6,7], min=3, max=7, median=5, avg=5 // - Output at position 9: window=[5,6,7,8,9], min=5, max=9, median=7, avg=7 + // Small delay to ensure API can process each state update for (int i = 1; i <= 10; i++) { id(source_sensor).publish_state(float(i)); + delay(10); } diff --git a/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml b/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml index 1ff9ec542a..ec8917c2e2 100644 --- a/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml +++ b/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml @@ -30,7 +30,9 @@ button: - lambda: |- // Publish 9 values to test ring buffer wraparound // Values: 10, 20, 30, 5, 25, 15, 40, 35, 20 + // Small delay to ensure API can process each state update float 
values[] = {10.0, 20.0, 30.0, 5.0, 25.0, 15.0, 40.0, 35.0, 20.0}; for (int i = 0; i < 9; i++) { id(source_wraparound).publish_state(values[i]); + delay(10); } diff --git a/tests/integration/fixtures/sensor_filters_sliding_window.yaml b/tests/integration/fixtures/sensor_filters_sliding_window.yaml index a2ae3182b8..2b58477aa9 100644 --- a/tests/integration/fixtures/sensor_filters_sliding_window.yaml +++ b/tests/integration/fixtures/sensor_filters_sliding_window.yaml @@ -73,6 +73,8 @@ button: on_press: - lambda: |- // Publish 10 values: 1.0, 2.0, ..., 10.0 + // Small delay to ensure API can process each state update for (int i = 1; i <= 10; i++) { id(source_sensor).publish_state(float(i)); + delay(10); } From 55e03036e23ee87ccca4d60d9002093d998e6cf9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 15 Oct 2025 21:46:00 -1000 Subject: [PATCH 30/31] preen --- tests/integration/fixtures/sensor_filters_nan_handling.yaml | 2 ++ .../fixtures/sensor_filters_ring_buffer_wraparound.yaml | 1 + .../integration/fixtures/sensor_filters_sliding_window.yaml | 5 +++++ 3 files changed, 8 insertions(+) diff --git a/tests/integration/fixtures/sensor_filters_nan_handling.yaml b/tests/integration/fixtures/sensor_filters_nan_handling.yaml index 5fc3d1db29..445d20497b 100644 --- a/tests/integration/fixtures/sensor_filters_nan_handling.yaml +++ b/tests/integration/fixtures/sensor_filters_nan_handling.yaml @@ -21,6 +21,7 @@ sensor: - min: window_size: 5 send_every: 5 + send_first_at: 1 - platform: copy source_id: source_nan_sensor @@ -30,6 +31,7 @@ sensor: - max: window_size: 5 send_every: 5 + send_first_at: 1 button: - platform: template diff --git a/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml b/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml index ec8917c2e2..4757d78aeb 100644 --- a/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml +++ b/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml @@ -21,6 +21,7 @@ sensor: - min: window_size: 3 send_every: 3 + send_first_at: 1 button: - platform: template diff --git a/tests/integration/fixtures/sensor_filters_sliding_window.yaml b/tests/integration/fixtures/sensor_filters_sliding_window.yaml index 2b58477aa9..edcc596f64 100644 --- a/tests/integration/fixtures/sensor_filters_sliding_window.yaml +++ b/tests/integration/fixtures/sensor_filters_sliding_window.yaml @@ -23,6 +23,7 @@ sensor: - min: window_size: 5 send_every: 5 + send_first_at: 1 # Max filter sensor - platform: copy @@ -33,6 +34,7 @@ sensor: - max: window_size: 5 send_every: 5 + send_first_at: 1 # Median filter sensor - platform: copy @@ -43,6 +45,7 @@ sensor: - median: window_size: 5 send_every: 5 + send_first_at: 1 # Quantile filter sensor (90th percentile) - platform: copy @@ -53,6 +56,7 @@ sensor: - quantile: window_size: 5 send_every: 5 + send_first_at: 1 quantile: 0.9 # Moving average filter sensor @@ -64,6 +68,7 @@ sensor: - sliding_window_moving_average: window_size: 5 send_every: 5 + send_first_at: 1 # Button to trigger publishing test values button: From baf117b411fbd43edc66d0ead545ad22b1e62961 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 15 Oct 2025 22:03:22 -1000 Subject: [PATCH 31/31] fix flakey test --- .../fixtures/sensor_filters_nan_handling.yaml | 67 ++++++++++++------- .../fixtures/sensor_filters_ring_buffer.yaml | 64 ++++++++++++++---- ...sensor_filters_ring_buffer_wraparound.yaml | 51 +++++++++++--- .../sensor_filters_sliding_window.yaml | 52 ++++++++++++-- 4 files changed, 182 insertions(+), 52 deletions(-) diff --git a/tests/integration/fixtures/sensor_filters_nan_handling.yaml b/tests/integration/fixtures/sensor_filters_nan_handling.yaml index 445d20497b..fcb12cfde5 100644 --- a/tests/integration/fixtures/sensor_filters_nan_handling.yaml +++ b/tests/integration/fixtures/sensor_filters_nan_handling.yaml @@ -33,31 +33,52 @@ sensor: send_every: 5 send_first_at: 1 +script: + - id: publish_nan_values_script + then: + - sensor.template.publish: + id: source_nan_sensor + state: 10.0 + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: !lambda 'return NAN;' + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: 5.0 + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: !lambda 'return NAN;' + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: 15.0 + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: 8.0 + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: !lambda 'return NAN;' + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: 12.0 + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: 3.0 + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: !lambda 'return NAN;' + button: - platform: template name: "Publish NaN Values Button" id: publish_nan_button on_press: - - lambda: |- - // Publish 10 values with NaN mixed in: 10, NaN, 5, NaN, 15, 8, NaN, 12, 3, NaN - // Small delay to ensure API can process each state update - id(source_nan_sensor).publish_state(10.0); - delay(10); - id(source_nan_sensor).publish_state(NAN); - delay(10); - id(source_nan_sensor).publish_state(5.0); - delay(10); - id(source_nan_sensor).publish_state(NAN); - delay(10); - id(source_nan_sensor).publish_state(15.0); - delay(10); - id(source_nan_sensor).publish_state(8.0); - delay(10); - id(source_nan_sensor).publish_state(NAN); - delay(10); - id(source_nan_sensor).publish_state(12.0); - delay(10); - id(source_nan_sensor).publish_state(3.0); - delay(10); - id(source_nan_sensor).publish_state(NAN); - delay(10); + - script.execute: publish_nan_values_script diff --git a/tests/integration/fixtures/sensor_filters_ring_buffer.yaml b/tests/integration/fixtures/sensor_filters_ring_buffer.yaml index fb502dc999..ea7a326b8d 100644 --- a/tests/integration/fixtures/sensor_filters_ring_buffer.yaml +++ b/tests/integration/fixtures/sensor_filters_ring_buffer.yaml @@ -57,21 +57,59 @@ sensor: send_first_at: 1 # Button to trigger publishing test values +script: + - id: publish_values_script + then: + # Publish 10 values: 1.0, 2.0, ..., 10.0 + # With window_size=5, send_every=2, send_first_at=1: + # - Output at position 1: window=[1], min=1, max=1, median=1, avg=1 + # - Output at position 3: window=[1,2,3], min=1, max=3, median=2, avg=2 + # - Output at position 5: window=[1,2,3,4,5], min=1, max=5, median=3, avg=3 + # - Output at position 7: window=[3,4,5,6,7], min=3, max=7, median=5, avg=5 + # - Output at position 9: window=[5,6,7,8,9], min=5, max=9, median=7, avg=7 + - sensor.template.publish: + id: source_sensor + state: 1.0 + - 
delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 2.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 3.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 4.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 5.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 6.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 7.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 8.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 9.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 10.0 + button: - platform: template name: "Publish Values Button" id: publish_button on_press: - - lambda: |- - // Publish 10 values: 1.0, 2.0, ..., 10.0 - // With window_size=5, send_every=2, send_first_at=1: - // - Output at position 1: window=[1], min=1, max=1, median=1, avg=1 - // - Output at position 3: window=[1,2,3], min=1, max=3, median=2, avg=2 - // - Output at position 5: window=[1,2,3,4,5], min=1, max=5, median=3, avg=3 - // - Output at position 7: window=[3,4,5,6,7], min=3, max=7, median=5, avg=5 - // - Output at position 9: window=[5,6,7,8,9], min=5, max=9, median=7, avg=7 - // Small delay to ensure API can process each state update - for (int i = 1; i <= 10; i++) { - id(source_sensor).publish_state(float(i)); - delay(10); - } + - script.execute: publish_values_script diff --git a/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml b/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml index 4757d78aeb..bd5980160b 100644 --- a/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml +++ b/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml @@ -23,17 +23,50 @@ sensor: send_every: 3 send_first_at: 1 +script: + - id: publish_wraparound_script + then: + # Publish 9 values to test ring buffer wraparound + # Values: 10, 20, 30, 5, 25, 15, 40, 35, 20 + - sensor.template.publish: + id: source_wraparound + state: 10.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 20.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 30.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 5.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 25.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 15.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 40.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 35.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 20.0 + button: - platform: template name: "Publish Wraparound Button" id: publish_wraparound_button on_press: - - lambda: |- - // Publish 9 values to test ring buffer wraparound - // Values: 10, 20, 30, 5, 25, 15, 40, 35, 20 - // Small delay to ensure API can process each state update - float values[] = {10.0, 20.0, 30.0, 5.0, 25.0, 15.0, 40.0, 35.0, 20.0}; - for (int i = 0; i < 9; i++) { - id(source_wraparound).publish_state(values[i]); - delay(10); - } + - script.execute: publish_wraparound_script diff --git a/tests/integration/fixtures/sensor_filters_sliding_window.yaml b/tests/integration/fixtures/sensor_filters_sliding_window.yaml index edcc596f64..2055118811 100644 --- a/tests/integration/fixtures/sensor_filters_sliding_window.yaml +++ 
b/tests/integration/fixtures/sensor_filters_sliding_window.yaml @@ -70,16 +70,54 @@ sensor: send_every: 5 send_first_at: 1 +# Script to publish values with delays +script: + - id: publish_values_script + then: + - sensor.template.publish: + id: source_sensor + state: 1.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 2.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 3.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 4.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 5.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 6.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 7.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 8.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 9.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 10.0 + # Button to trigger publishing test values button: - platform: template name: "Publish Values Button" id: publish_button on_press: - - lambda: |- - // Publish 10 values: 1.0, 2.0, ..., 10.0 - // Small delay to ensure API can process each state update - for (int i = 1; i <= 10; i++) { - id(source_sensor).publish_state(float(i)); - delay(10); - } + - script.execute: publish_values_script
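
A quick cross-check of the window semantics the series above relies on, runnable on a desktop: the Python sketch below is not part of the patch series (the function names are invented for illustration, and it simplifies the C++ in filter.cpp), but it models both the ring-buffer sliding window and the O(1) streaming batch variant and confirms they emit the same outputs when window_size == send_every, using the wraparound fixture's values.

import math

def sliding_window_min(values, window_size, send_every, send_first_at):
    # Ring-buffer variant: keeps the last window_size values (O(n) memory).
    window, head, count, outputs = [], 0, 0, []
    for v in values:
        if len(window) < window_size:
            window.append(v)  # buffer not yet full: just append
        else:
            window[head] = v  # buffer full: overwrite the oldest value
            head = (head + 1) % window_size
        count += 1
        # Send at position send_first_at, then every send_every values after it
        if count == send_first_at or (
            count > send_first_at and (count - send_first_at) % send_every == 0
        ):
            valid = [x for x in window if not math.isnan(x)]
            outputs.append(min(valid) if valid else math.nan)
    return outputs

def streaming_min(values, window_size, send_first_at):
    # Batch variant (window_size == send_every): tracks one float (O(1) memory).
    current, count, first, outputs = math.nan, 0, True, []
    for v in values:
        if not math.isnan(v):
            current = v if math.isnan(current) else min(current, v)
        count += 1
        if (first and count >= send_first_at) or (not first and count >= window_size):
            outputs.append(current)
            current, count, first = math.nan, 0, False  # reset for the next batch
    return outputs

# Wraparound fixture values with window_size=3, send_every=3, send_first_at=1:
# outputs are expected at positions 1, 4, 7 -> [10, 5, 15]
vals = [10, 20, 30, 5, 25, 15, 40, 35, 20]
assert sliding_window_min(vals, 3, 3, 1) == [10, 5, 15]
assert streaming_min(vals, 3, 1) == [10, 5, 15]

Both variants report at positions 1, 4, and 7 and agree on every batch, which is why the codegen in sensor/__init__.py can substitute the streaming classes whenever window_size == send_every: observable behavior is unchanged while per-filter state drops from a window_size-element float buffer to a single accumulator.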