From c4eeed7f7e28f5590ea5eaab365a01bcede20fb0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 14 Oct 2025 13:05:02 -1000 Subject: [PATCH 001/336] [ci] Automatic Flash/RAM impact analysis --- .github/workflows/memory-impact.yml | 150 +++++++++++++++++ script/ci_helpers.py | 23 +++ script/ci_memory_impact_comment.py | 244 ++++++++++++++++++++++++++++ script/ci_memory_impact_detector.py | 134 +++++++++++++++ script/ci_memory_impact_extract.py | 104 ++++++++++++ 5 files changed, 655 insertions(+) create mode 100644 .github/workflows/memory-impact.yml create mode 100755 script/ci_helpers.py create mode 100755 script/ci_memory_impact_comment.py create mode 100755 script/ci_memory_impact_detector.py create mode 100755 script/ci_memory_impact_extract.py diff --git a/.github/workflows/memory-impact.yml b/.github/workflows/memory-impact.yml new file mode 100644 index 0000000000..dff73e6cd7 --- /dev/null +++ b/.github/workflows/memory-impact.yml @@ -0,0 +1,150 @@ +--- +name: Memory Impact Analysis + +on: + pull_request: + paths: + - "esphome/components/**" + - "esphome/core/**" + +permissions: + contents: read + pull-requests: write + +env: + DEFAULT_PYTHON: "3.11" + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number }} + cancel-in-progress: true + +jobs: + detect-single-component: + name: Detect single component change + runs-on: ubuntu-24.04 + outputs: + should_run: ${{ steps.detect.outputs.should_run }} + component: ${{ steps.detect.outputs.component }} + test_file: ${{ steps.detect.outputs.test_file }} + platform: ${{ steps.detect.outputs.platform }} + steps: + - name: Check out code from GitHub + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + fetch-depth: 0 + - name: Set up Python ${{ env.DEFAULT_PYTHON }} + uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 + with: + python-version: ${{ env.DEFAULT_PYTHON }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install PyYAML + - name: Detect single component change + id: detect + run: | + python script/ci_memory_impact_detector.py + + build-target-branch: + name: Build target branch + runs-on: ubuntu-24.04 + needs: detect-single-component + if: needs.detect-single-component.outputs.should_run == 'true' + outputs: + ram_usage: ${{ steps.extract.outputs.ram_usage }} + flash_usage: ${{ steps.extract.outputs.flash_usage }} + steps: + - name: Check out target branch + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + ref: ${{ github.base_ref }} + - name: Set up Python ${{ env.DEFAULT_PYTHON }} + uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 + with: + python-version: ${{ env.DEFAULT_PYTHON }} + - name: Install ESPHome + run: | + pip install -e . 
+ - name: Cache platformio + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-memory-${{ needs.detect-single-component.outputs.platform }}-${{ hashFiles('platformio.ini') }} + - name: Compile test configuration and extract memory usage + id: extract + run: | + component="${{ needs.detect-single-component.outputs.component }}" + platform="${{ needs.detect-single-component.outputs.platform }}" + test_file="${{ needs.detect-single-component.outputs.test_file }}" + + echo "Compiling $component for $platform using $test_file" + python script/test_build_components.py -e compile -c "$component" -t "$platform" --no-grouping 2>&1 | \ + python script/ci_memory_impact_extract.py --output-env + + build-pr-branch: + name: Build PR branch + runs-on: ubuntu-24.04 + needs: detect-single-component + if: needs.detect-single-component.outputs.should_run == 'true' + outputs: + ram_usage: ${{ steps.extract.outputs.ram_usage }} + flash_usage: ${{ steps.extract.outputs.flash_usage }} + steps: + - name: Check out PR branch + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - name: Set up Python ${{ env.DEFAULT_PYTHON }} + uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 + with: + python-version: ${{ env.DEFAULT_PYTHON }} + - name: Install ESPHome + run: | + pip install -e . + - name: Cache platformio + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-memory-${{ needs.detect-single-component.outputs.platform }}-${{ hashFiles('platformio.ini') }} + - name: Compile test configuration and extract memory usage + id: extract + run: | + component="${{ needs.detect-single-component.outputs.component }}" + platform="${{ needs.detect-single-component.outputs.platform }}" + test_file="${{ needs.detect-single-component.outputs.test_file }}" + + echo "Compiling $component for $platform using $test_file" + python script/test_build_components.py -e compile -c "$component" -t "$platform" --no-grouping 2>&1 | \ + python script/ci_memory_impact_extract.py --output-env + + comment-results: + name: Comment memory impact + runs-on: ubuntu-24.04 + needs: + - detect-single-component + - build-target-branch + - build-pr-branch + if: needs.detect-single-component.outputs.should_run == 'true' + steps: + - name: Check out code + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - name: Set up Python ${{ env.DEFAULT_PYTHON }} + uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 + with: + python-version: ${{ env.DEFAULT_PYTHON }} + - name: Post or update PR comment + env: + GH_TOKEN: ${{ github.token }} + COMPONENT: ${{ needs.detect-single-component.outputs.component }} + PLATFORM: ${{ needs.detect-single-component.outputs.platform }} + TARGET_RAM: ${{ needs.build-target-branch.outputs.ram_usage }} + TARGET_FLASH: ${{ needs.build-target-branch.outputs.flash_usage }} + PR_RAM: ${{ needs.build-pr-branch.outputs.ram_usage }} + PR_FLASH: ${{ needs.build-pr-branch.outputs.flash_usage }} + run: | + python script/ci_memory_impact_comment.py \ + --pr-number "${{ github.event.pull_request.number }}" \ + --component "$COMPONENT" \ + --platform "$PLATFORM" \ + --target-ram "$TARGET_RAM" \ + --target-flash "$TARGET_FLASH" \ + --pr-ram "$PR_RAM" \ + --pr-flash "$PR_FLASH" diff --git a/script/ci_helpers.py b/script/ci_helpers.py new file mode 100755 index 0000000000..48b0e4bbfe --- /dev/null +++ 
b/script/ci_helpers.py @@ -0,0 +1,23 @@ +"""Common helper functions for CI scripts.""" + +from __future__ import annotations + +import os + + +def write_github_output(outputs: dict[str, str | int]) -> None: + """Write multiple outputs to GITHUB_OUTPUT or stdout. + + When running in GitHub Actions, writes to the GITHUB_OUTPUT file. + When running locally, writes to stdout for debugging. + + Args: + outputs: Dictionary of key-value pairs to write + """ + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output: + with open(github_output, "a", encoding="utf-8") as f: + f.writelines(f"{key}={value}\n" for key, value in outputs.items()) + else: + for key, value in outputs.items(): + print(f"{key}={value}") diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py new file mode 100755 index 0000000000..69f703bd78 --- /dev/null +++ b/script/ci_memory_impact_comment.py @@ -0,0 +1,244 @@ +#!/usr/bin/env python3 +"""Post or update a PR comment with memory impact analysis results. + +This script creates or updates a GitHub PR comment with memory usage changes. +It uses the GitHub CLI (gh) to manage comments and maintains a single comment +that gets updated on subsequent runs. +""" + +from __future__ import annotations + +import argparse +import json +import subprocess +import sys + +# Comment marker to identify our memory impact comments +COMMENT_MARKER = "" + + +def format_bytes(bytes_value: int) -> str: + """Format bytes value with appropriate unit. + + Args: + bytes_value: Number of bytes + + Returns: + Formatted string (e.g., "1.5 KB", "256 bytes") + """ + if bytes_value < 1024: + return f"{bytes_value} bytes" + if bytes_value < 1024 * 1024: + return f"{bytes_value / 1024:.2f} KB" + return f"{bytes_value / (1024 * 1024):.2f} MB" + + +def format_change(before: int, after: int) -> str: + """Format memory change with delta and percentage. + + Args: + before: Memory usage before change + after: Memory usage after change + + Returns: + Formatted string with delta and percentage + """ + delta = after - before + percentage = 0.0 if before == 0 else (delta / before) * 100 + + # Format delta with sign + delta_str = f"+{format_bytes(delta)}" if delta >= 0 else format_bytes(delta) + + # Format percentage with sign + if percentage > 0: + pct_str = f"+{percentage:.2f}%" + elif percentage < 0: + pct_str = f"{percentage:.2f}%" + else: + pct_str = "0.00%" + + # Add emoji indicator + if delta > 0: + emoji = "📈" + elif delta < 0: + emoji = "📉" + else: + emoji = "➡️" + + return f"{emoji} {delta_str} ({pct_str})" + + +def create_comment_body( + component: str, + platform: str, + target_ram: int, + target_flash: int, + pr_ram: int, + pr_flash: int, +) -> str: + """Create the comment body with memory impact analysis. 
+ + Args: + component: Component name + platform: Platform name + target_ram: RAM usage in target branch + target_flash: Flash usage in target branch + pr_ram: RAM usage in PR branch + pr_flash: Flash usage in PR branch + + Returns: + Formatted comment body + """ + ram_change = format_change(target_ram, pr_ram) + flash_change = format_change(target_flash, pr_flash) + + return f"""{COMMENT_MARKER} +## Memory Impact Analysis + +**Component:** `{component}` +**Platform:** `{platform}` + +| Metric | Target Branch | This PR | Change | +|--------|--------------|---------|--------| +| **RAM** | {format_bytes(target_ram)} | {format_bytes(pr_ram)} | {ram_change} | +| **Flash** | {format_bytes(target_flash)} | {format_bytes(pr_flash)} | {flash_change} | + +--- +*This analysis runs automatically when a single component changes. Memory usage is measured from a representative test configuration.* +""" + + +def find_existing_comment(pr_number: str) -> str | None: + """Find existing memory impact comment on the PR. + + Args: + pr_number: PR number + + Returns: + Comment ID if found, None otherwise + """ + try: + # List all comments on the PR + result = subprocess.run( + [ + "gh", + "pr", + "view", + pr_number, + "--json", + "comments", + "--jq", + ".comments[]", + ], + capture_output=True, + text=True, + check=True, + ) + + # Parse comments and look for our marker + for line in result.stdout.strip().split("\n"): + if not line: + continue + + try: + comment = json.loads(line) + if COMMENT_MARKER in comment.get("body", ""): + return str(comment["id"]) + except json.JSONDecodeError: + continue + + return None + + except subprocess.CalledProcessError as e: + print(f"Error finding existing comment: {e}", file=sys.stderr) + return None + + +def post_or_update_comment(pr_number: str, comment_body: str) -> bool: + """Post a new comment or update existing one. 
+ + Args: + pr_number: PR number + comment_body: Comment body text + + Returns: + True if successful, False otherwise + """ + # Look for existing comment + existing_comment_id = find_existing_comment(pr_number) + + try: + if existing_comment_id: + # Update existing comment + print(f"Updating existing comment {existing_comment_id}", file=sys.stderr) + subprocess.run( + [ + "gh", + "api", + f"/repos/{{owner}}/{{repo}}/issues/comments/{existing_comment_id}", + "-X", + "PATCH", + "-f", + f"body={comment_body}", + ], + check=True, + capture_output=True, + ) + else: + # Post new comment + print("Posting new comment", file=sys.stderr) + subprocess.run( + ["gh", "pr", "comment", pr_number, "--body", comment_body], + check=True, + capture_output=True, + ) + + print("Comment posted/updated successfully", file=sys.stderr) + return True + + except subprocess.CalledProcessError as e: + print(f"Error posting/updating comment: {e}", file=sys.stderr) + if e.stderr: + print(f"stderr: {e.stderr.decode()}", file=sys.stderr) + return False + + +def main() -> int: + """Main entry point.""" + parser = argparse.ArgumentParser( + description="Post or update PR comment with memory impact analysis" + ) + parser.add_argument("--pr-number", required=True, help="PR number") + parser.add_argument("--component", required=True, help="Component name") + parser.add_argument("--platform", required=True, help="Platform name") + parser.add_argument( + "--target-ram", type=int, required=True, help="Target branch RAM usage" + ) + parser.add_argument( + "--target-flash", type=int, required=True, help="Target branch flash usage" + ) + parser.add_argument("--pr-ram", type=int, required=True, help="PR branch RAM usage") + parser.add_argument( + "--pr-flash", type=int, required=True, help="PR branch flash usage" + ) + + args = parser.parse_args() + + # Create comment body + comment_body = create_comment_body( + component=args.component, + platform=args.platform, + target_ram=args.target_ram, + target_flash=args.target_flash, + pr_ram=args.pr_ram, + pr_flash=args.pr_flash, + ) + + # Post or update comment + success = post_or_update_comment(args.pr_number, comment_body) + + return 0 if success else 1 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/script/ci_memory_impact_detector.py b/script/ci_memory_impact_detector.py new file mode 100755 index 0000000000..8c3045ab00 --- /dev/null +++ b/script/ci_memory_impact_detector.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python3 +"""Detect if a PR changes exactly one component for memory impact analysis. + +This script is used by the CI workflow to determine if a PR should trigger +memory impact analysis. The analysis only runs when: +1. Exactly one component has changed (not counting core changes) +2. The component has at least one test configuration + +The script outputs GitHub Actions environment variables to control the workflow. 
+""" + +from __future__ import annotations + +from pathlib import Path +import sys + +# Add esphome to path +sys.path.insert(0, str(Path(__file__).parent.parent)) + +# pylint: disable=wrong-import-position +from script.ci_helpers import write_github_output +from script.helpers import ESPHOME_COMPONENTS_PATH, changed_files + +# Platform preference order for memory impact analysis +# Ordered by production relevance and memory constraint importance +PLATFORM_PREFERENCE = [ + "esp32-idf", # Primary ESP32 IDF platform + "esp32-c3-idf", # ESP32-C3 IDF + "esp32-c6-idf", # ESP32-C6 IDF + "esp32-s2-idf", # ESP32-S2 IDF + "esp32-s3-idf", # ESP32-S3 IDF + "esp32-c2-idf", # ESP32-C2 IDF + "esp32-c5-idf", # ESP32-C5 IDF + "esp32-h2-idf", # ESP32-H2 IDF + "esp32-p4-idf", # ESP32-P4 IDF + "esp8266-ard", # ESP8266 Arduino (memory constrained) + "esp32-ard", # ESP32 Arduino + "esp32-c3-ard", # ESP32-C3 Arduino + "esp32-s2-ard", # ESP32-S2 Arduino + "esp32-s3-ard", # ESP32-S3 Arduino + "bk72xx-ard", # BK72xx Arduino + "rp2040-ard", # RP2040 Arduino + "nrf52-adafruit", # nRF52 Adafruit + "host", # Host platform (development/testing) +] + + +def find_test_for_component(component: str) -> tuple[str | None, str | None]: + """Find a test configuration for the given component. + + Prefers platforms based on PLATFORM_PREFERENCE order. + + Args: + component: Component name + + Returns: + Tuple of (test_file_name, platform) or (None, None) if no test found + """ + tests_dir = Path(__file__).parent.parent / "tests" / "components" / component + + if not tests_dir.exists(): + return None, None + + # Look for test files + test_files = list(tests_dir.glob("test.*.yaml")) + if not test_files: + return None, None + + # Try each preferred platform in order + for preferred_platform in PLATFORM_PREFERENCE: + for test_file in test_files: + parts = test_file.stem.split(".") + if len(parts) >= 2: + platform = parts[1] + if platform == preferred_platform: + return test_file.name, platform + + # Fall back to first test file + test_file = test_files[0] + parts = test_file.stem.split(".") + platform = parts[1] if len(parts) >= 2 else "esp32-idf" + return test_file.name, platform + + +def detect_single_component_change() -> None: + """Detect if exactly one component changed and output GitHub Actions variables.""" + files = changed_files() + + # Find all changed components (excluding core) + changed_components = set() + + for file in files: + if file.startswith(ESPHOME_COMPONENTS_PATH): + parts = file.split("/") + if len(parts) >= 3: + component = parts[2] + # Skip base bus components as they're used across many builds + if component not in ["i2c", "spi", "uart", "modbus"]: + changed_components.add(component) + + # Only proceed if exactly one component changed + if len(changed_components) != 1: + print( + f"Found {len(changed_components)} component(s) changed, skipping memory analysis" + ) + write_github_output({"should_run": "false"}) + return + + component = list(changed_components)[0] + print(f"Detected single component change: {component}") + + # Find a test configuration for this component + test_file, platform = find_test_for_component(component) + + if not test_file: + print(f"No test configuration found for {component}, skipping memory analysis") + write_github_output({"should_run": "false"}) + return + + print(f"Found test: {test_file} for platform: {platform}") + print("Memory impact analysis will run") + + write_github_output( + { + "should_run": "true", + "component": component, + "test_file": test_file, + "platform": 
platform, + } + ) + + +if __name__ == "__main__": + detect_single_component_change() diff --git a/script/ci_memory_impact_extract.py b/script/ci_memory_impact_extract.py new file mode 100755 index 0000000000..9ddd39096f --- /dev/null +++ b/script/ci_memory_impact_extract.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python3 +"""Extract memory usage statistics from ESPHome build output. + +This script parses the PlatformIO build output to extract RAM and flash +usage statistics for a compiled component. It's used by the CI workflow to +compare memory usage between branches. + +The script reads compile output from stdin and looks for the standard +PlatformIO output format: + RAM: [==== ] 36.1% (used 29548 bytes from 81920 bytes) + Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes) +""" + +from __future__ import annotations + +import argparse +from pathlib import Path +import re +import sys + +# Add esphome to path +sys.path.insert(0, str(Path(__file__).parent.parent)) + +# pylint: disable=wrong-import-position +from script.ci_helpers import write_github_output + + +def extract_from_compile_output(output_text: str) -> tuple[int | None, int | None]: + """Extract memory usage from PlatformIO compile output. + + Looks for lines like: + RAM: [==== ] 36.1% (used 29548 bytes from 81920 bytes) + Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes) + + Args: + output_text: Compile output text + + Returns: + Tuple of (ram_bytes, flash_bytes) or (None, None) if not found + """ + ram_match = re.search( + r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", output_text + ) + flash_match = re.search( + r"Flash:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", output_text + ) + + if ram_match and flash_match: + return int(ram_match.group(1)), int(flash_match.group(1)) + + return None, None + + +def main() -> int: + """Main entry point.""" + parser = argparse.ArgumentParser( + description="Extract memory usage from ESPHome build output" + ) + parser.add_argument( + "--output-env", + action="store_true", + help="Output to GITHUB_OUTPUT environment file", + ) + + args = parser.parse_args() + + # Read compile output from stdin + compile_output = sys.stdin.read() + + # Extract memory usage + ram_bytes, flash_bytes = extract_from_compile_output(compile_output) + + if ram_bytes is None or flash_bytes is None: + print("Failed to extract memory usage from compile output", file=sys.stderr) + print("Expected lines like:", file=sys.stderr) + print( + " RAM: [==== ] 36.1% (used 29548 bytes from 81920 bytes)", + file=sys.stderr, + ) + print( + " Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes)", + file=sys.stderr, + ) + return 1 + + print(f"RAM: {ram_bytes} bytes", file=sys.stderr) + print(f"Flash: {flash_bytes} bytes", file=sys.stderr) + + if args.output_env: + # Output to GitHub Actions + write_github_output( + { + "ram_usage": ram_bytes, + "flash_usage": flash_bytes, + } + ) + else: + print(f"{ram_bytes},{flash_bytes}") + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) From 25a6202bb9cc86e2ed8258f17ff98a476dbda2cc Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 14 Oct 2025 13:09:01 -1000 Subject: [PATCH 002/336] [ci] Automatic Flash/RAM impact analysis --- .github/workflows/ci.yml | 117 ++++++++++++++++++++++ .github/workflows/memory-impact.yml | 150 ---------------------------- script/ci_memory_impact_detector.py | 134 ------------------------- script/determine-jobs.py | 99 +++++++++++++++++- 4 files changed, 215 insertions(+), 285 deletions(-) delete mode 100644 .github/workflows/memory-impact.yml delete mode 100755 script/ci_memory_impact_detector.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0363b5afdf..7a731a1b02 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -179,6 +179,7 @@ jobs: changed-components: ${{ steps.determine.outputs.changed-components }} changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }} component-test-count: ${{ steps.determine.outputs.component-test-count }} + memory_impact: ${{ steps.determine.outputs.memory-impact }} steps: - name: Check out code from GitHub uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 @@ -207,6 +208,7 @@ jobs: echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT + echo "memory-impact=$(echo "$output" | jq -c '.memory_impact')" >> $GITHUB_OUTPUT integration-tests: name: Run integration tests @@ -510,6 +512,118 @@ jobs: - uses: pre-commit-ci/lite-action@5d6cc0eb514c891a40562a58a8e71576c5c7fb43 # v1.1.0 if: always() + memory-impact-target-branch: + name: Build target branch for memory impact + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' + outputs: + ram_usage: ${{ steps.extract.outputs.ram_usage }} + flash_usage: ${{ steps.extract.outputs.flash_usage }} + steps: + - name: Check out target branch + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + ref: ${{ github.base_ref }} + - name: Restore Python + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + - name: Cache platformio + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }} + - name: Compile test configuration and extract memory usage + id: extract + run: | + . 
venv/bin/activate + component="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).component }}" + platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}" + test_file="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).test_file }}" + + echo "Compiling $component for $platform using $test_file" + python script/test_build_components.py -e compile -c "$component" -t "$platform" --no-grouping 2>&1 | \ + python script/ci_memory_impact_extract.py --output-env + + memory-impact-pr-branch: + name: Build PR branch for memory impact + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' + outputs: + ram_usage: ${{ steps.extract.outputs.ram_usage }} + flash_usage: ${{ steps.extract.outputs.flash_usage }} + steps: + - name: Check out PR branch + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - name: Restore Python + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + - name: Cache platformio + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }} + - name: Compile test configuration and extract memory usage + id: extract + run: | + . venv/bin/activate + component="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).component }}" + platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}" + test_file="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).test_file }}" + + echo "Compiling $component for $platform using $test_file" + python script/test_build_components.py -e compile -c "$component" -t "$platform" --no-grouping 2>&1 | \ + python script/ci_memory_impact_extract.py --output-env + + memory-impact-comment: + name: Comment memory impact + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + - memory-impact-target-branch + - memory-impact-pr-branch + if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' + permissions: + contents: read + pull-requests: write + steps: + - name: Check out code + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - name: Restore Python + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + - name: Post or update PR comment + env: + GH_TOKEN: ${{ github.token }} + COMPONENT: ${{ fromJSON(needs.determine-jobs.outputs.memory_impact).component }} + PLATFORM: ${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }} + TARGET_RAM: ${{ needs.memory-impact-target-branch.outputs.ram_usage }} + TARGET_FLASH: ${{ needs.memory-impact-target-branch.outputs.flash_usage }} + PR_RAM: ${{ needs.memory-impact-pr-branch.outputs.ram_usage }} + PR_FLASH: ${{ needs.memory-impact-pr-branch.outputs.flash_usage }} + run: | + . 
venv/bin/activate + python script/ci_memory_impact_comment.py \ + --pr-number "${{ github.event.pull_request.number }}" \ + --component "$COMPONENT" \ + --platform "$PLATFORM" \ + --target-ram "$TARGET_RAM" \ + --target-flash "$TARGET_FLASH" \ + --pr-ram "$PR_RAM" \ + --pr-flash "$PR_FLASH" + ci-status: name: CI Status runs-on: ubuntu-24.04 @@ -525,6 +639,9 @@ jobs: - test-build-components-splitter - test-build-components-split - pre-commit-ci-lite + - memory-impact-target-branch + - memory-impact-pr-branch + - memory-impact-comment if: always() steps: - name: Success diff --git a/.github/workflows/memory-impact.yml b/.github/workflows/memory-impact.yml deleted file mode 100644 index dff73e6cd7..0000000000 --- a/.github/workflows/memory-impact.yml +++ /dev/null @@ -1,150 +0,0 @@ ---- -name: Memory Impact Analysis - -on: - pull_request: - paths: - - "esphome/components/**" - - "esphome/core/**" - -permissions: - contents: read - pull-requests: write - -env: - DEFAULT_PYTHON: "3.11" - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number }} - cancel-in-progress: true - -jobs: - detect-single-component: - name: Detect single component change - runs-on: ubuntu-24.04 - outputs: - should_run: ${{ steps.detect.outputs.should_run }} - component: ${{ steps.detect.outputs.component }} - test_file: ${{ steps.detect.outputs.test_file }} - platform: ${{ steps.detect.outputs.platform }} - steps: - - name: Check out code from GitHub - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - with: - fetch-depth: 0 - - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - with: - python-version: ${{ env.DEFAULT_PYTHON }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install PyYAML - - name: Detect single component change - id: detect - run: | - python script/ci_memory_impact_detector.py - - build-target-branch: - name: Build target branch - runs-on: ubuntu-24.04 - needs: detect-single-component - if: needs.detect-single-component.outputs.should_run == 'true' - outputs: - ram_usage: ${{ steps.extract.outputs.ram_usage }} - flash_usage: ${{ steps.extract.outputs.flash_usage }} - steps: - - name: Check out target branch - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - with: - ref: ${{ github.base_ref }} - - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - with: - python-version: ${{ env.DEFAULT_PYTHON }} - - name: Install ESPHome - run: | - pip install -e . 
- - name: Cache platformio - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 - with: - path: ~/.platformio - key: platformio-memory-${{ needs.detect-single-component.outputs.platform }}-${{ hashFiles('platformio.ini') }} - - name: Compile test configuration and extract memory usage - id: extract - run: | - component="${{ needs.detect-single-component.outputs.component }}" - platform="${{ needs.detect-single-component.outputs.platform }}" - test_file="${{ needs.detect-single-component.outputs.test_file }}" - - echo "Compiling $component for $platform using $test_file" - python script/test_build_components.py -e compile -c "$component" -t "$platform" --no-grouping 2>&1 | \ - python script/ci_memory_impact_extract.py --output-env - - build-pr-branch: - name: Build PR branch - runs-on: ubuntu-24.04 - needs: detect-single-component - if: needs.detect-single-component.outputs.should_run == 'true' - outputs: - ram_usage: ${{ steps.extract.outputs.ram_usage }} - flash_usage: ${{ steps.extract.outputs.flash_usage }} - steps: - - name: Check out PR branch - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - with: - python-version: ${{ env.DEFAULT_PYTHON }} - - name: Install ESPHome - run: | - pip install -e . - - name: Cache platformio - uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 - with: - path: ~/.platformio - key: platformio-memory-${{ needs.detect-single-component.outputs.platform }}-${{ hashFiles('platformio.ini') }} - - name: Compile test configuration and extract memory usage - id: extract - run: | - component="${{ needs.detect-single-component.outputs.component }}" - platform="${{ needs.detect-single-component.outputs.platform }}" - test_file="${{ needs.detect-single-component.outputs.test_file }}" - - echo "Compiling $component for $platform using $test_file" - python script/test_build_components.py -e compile -c "$component" -t "$platform" --no-grouping 2>&1 | \ - python script/ci_memory_impact_extract.py --output-env - - comment-results: - name: Comment memory impact - runs-on: ubuntu-24.04 - needs: - - detect-single-component - - build-target-branch - - build-pr-branch - if: needs.detect-single-component.outputs.should_run == 'true' - steps: - - name: Check out code - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - with: - python-version: ${{ env.DEFAULT_PYTHON }} - - name: Post or update PR comment - env: - GH_TOKEN: ${{ github.token }} - COMPONENT: ${{ needs.detect-single-component.outputs.component }} - PLATFORM: ${{ needs.detect-single-component.outputs.platform }} - TARGET_RAM: ${{ needs.build-target-branch.outputs.ram_usage }} - TARGET_FLASH: ${{ needs.build-target-branch.outputs.flash_usage }} - PR_RAM: ${{ needs.build-pr-branch.outputs.ram_usage }} - PR_FLASH: ${{ needs.build-pr-branch.outputs.flash_usage }} - run: | - python script/ci_memory_impact_comment.py \ - --pr-number "${{ github.event.pull_request.number }}" \ - --component "$COMPONENT" \ - --platform "$PLATFORM" \ - --target-ram "$TARGET_RAM" \ - --target-flash "$TARGET_FLASH" \ - --pr-ram "$PR_RAM" \ - --pr-flash "$PR_FLASH" diff --git a/script/ci_memory_impact_detector.py b/script/ci_memory_impact_detector.py deleted file mode 100755 index 
8c3045ab00..0000000000 --- a/script/ci_memory_impact_detector.py +++ /dev/null @@ -1,134 +0,0 @@ -#!/usr/bin/env python3 -"""Detect if a PR changes exactly one component for memory impact analysis. - -This script is used by the CI workflow to determine if a PR should trigger -memory impact analysis. The analysis only runs when: -1. Exactly one component has changed (not counting core changes) -2. The component has at least one test configuration - -The script outputs GitHub Actions environment variables to control the workflow. -""" - -from __future__ import annotations - -from pathlib import Path -import sys - -# Add esphome to path -sys.path.insert(0, str(Path(__file__).parent.parent)) - -# pylint: disable=wrong-import-position -from script.ci_helpers import write_github_output -from script.helpers import ESPHOME_COMPONENTS_PATH, changed_files - -# Platform preference order for memory impact analysis -# Ordered by production relevance and memory constraint importance -PLATFORM_PREFERENCE = [ - "esp32-idf", # Primary ESP32 IDF platform - "esp32-c3-idf", # ESP32-C3 IDF - "esp32-c6-idf", # ESP32-C6 IDF - "esp32-s2-idf", # ESP32-S2 IDF - "esp32-s3-idf", # ESP32-S3 IDF - "esp32-c2-idf", # ESP32-C2 IDF - "esp32-c5-idf", # ESP32-C5 IDF - "esp32-h2-idf", # ESP32-H2 IDF - "esp32-p4-idf", # ESP32-P4 IDF - "esp8266-ard", # ESP8266 Arduino (memory constrained) - "esp32-ard", # ESP32 Arduino - "esp32-c3-ard", # ESP32-C3 Arduino - "esp32-s2-ard", # ESP32-S2 Arduino - "esp32-s3-ard", # ESP32-S3 Arduino - "bk72xx-ard", # BK72xx Arduino - "rp2040-ard", # RP2040 Arduino - "nrf52-adafruit", # nRF52 Adafruit - "host", # Host platform (development/testing) -] - - -def find_test_for_component(component: str) -> tuple[str | None, str | None]: - """Find a test configuration for the given component. - - Prefers platforms based on PLATFORM_PREFERENCE order. 
- - Args: - component: Component name - - Returns: - Tuple of (test_file_name, platform) or (None, None) if no test found - """ - tests_dir = Path(__file__).parent.parent / "tests" / "components" / component - - if not tests_dir.exists(): - return None, None - - # Look for test files - test_files = list(tests_dir.glob("test.*.yaml")) - if not test_files: - return None, None - - # Try each preferred platform in order - for preferred_platform in PLATFORM_PREFERENCE: - for test_file in test_files: - parts = test_file.stem.split(".") - if len(parts) >= 2: - platform = parts[1] - if platform == preferred_platform: - return test_file.name, platform - - # Fall back to first test file - test_file = test_files[0] - parts = test_file.stem.split(".") - platform = parts[1] if len(parts) >= 2 else "esp32-idf" - return test_file.name, platform - - -def detect_single_component_change() -> None: - """Detect if exactly one component changed and output GitHub Actions variables.""" - files = changed_files() - - # Find all changed components (excluding core) - changed_components = set() - - for file in files: - if file.startswith(ESPHOME_COMPONENTS_PATH): - parts = file.split("/") - if len(parts) >= 3: - component = parts[2] - # Skip base bus components as they're used across many builds - if component not in ["i2c", "spi", "uart", "modbus"]: - changed_components.add(component) - - # Only proceed if exactly one component changed - if len(changed_components) != 1: - print( - f"Found {len(changed_components)} component(s) changed, skipping memory analysis" - ) - write_github_output({"should_run": "false"}) - return - - component = list(changed_components)[0] - print(f"Detected single component change: {component}") - - # Find a test configuration for this component - test_file, platform = find_test_for_component(component) - - if not test_file: - print(f"No test configuration found for {component}, skipping memory analysis") - write_github_output({"should_run": "false"}) - return - - print(f"Found test: {test_file} for platform: {platform}") - print("Memory impact analysis will run") - - write_github_output( - { - "should_run": "true", - "component": component, - "test_file": test_file, - "platform": platform, - } - ) - - -if __name__ == "__main__": - detect_single_component_change() diff --git a/script/determine-jobs.py b/script/determine-jobs.py index a078fd8f9b..78fd32c3f4 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -10,7 +10,13 @@ what files have changed. 
It outputs JSON with the following structure: "clang_format": true/false, "python_linters": true/false, "changed_components": ["component1", "component2", ...], - "component_test_count": 5 + "component_test_count": 5, + "memory_impact": { + "should_run": "true/false", + "component": "component_name", + "test_file": "test.esp32-idf.yaml", + "platform": "esp32-idf" + } } The CI workflow uses this information to: @@ -20,6 +26,7 @@ The CI workflow uses this information to: - Skip or run Python linters (ruff, flake8, pylint, pyupgrade) - Determine which components to test individually - Decide how to split component tests (if there are many) +- Run memory impact analysis when exactly one component changes Usage: python script/determine-jobs.py [-b BRANCH] @@ -212,6 +219,92 @@ def _any_changed_file_endswith(branch: str | None, extensions: tuple[str, ...]) return any(file.endswith(extensions) for file in changed_files(branch)) +def detect_single_component_for_memory_impact( + changed_components: list[str], +) -> dict[str, Any]: + """Detect if exactly one component changed for memory impact analysis. + + Args: + changed_components: List of changed component names + + Returns: + Dictionary with memory impact analysis parameters: + - should_run: "true" or "false" + - component: component name (if should_run is true) + - test_file: test file name (if should_run is true) + - platform: platform name (if should_run is true) + """ + # Platform preference order for memory impact analysis + # Ordered by production relevance and memory constraint importance + PLATFORM_PREFERENCE = [ + "esp32-idf", # Primary ESP32 IDF platform + "esp32-c3-idf", # ESP32-C3 IDF + "esp32-c6-idf", # ESP32-C6 IDF + "esp32-s2-idf", # ESP32-S2 IDF + "esp32-s3-idf", # ESP32-S3 IDF + "esp32-c2-idf", # ESP32-C2 IDF + "esp32-c5-idf", # ESP32-C5 IDF + "esp32-h2-idf", # ESP32-H2 IDF + "esp32-p4-idf", # ESP32-P4 IDF + "esp8266-ard", # ESP8266 Arduino (memory constrained) + "esp32-ard", # ESP32 Arduino + "esp32-c3-ard", # ESP32-C3 Arduino + "esp32-s2-ard", # ESP32-S2 Arduino + "esp32-s3-ard", # ESP32-S3 Arduino + "bk72xx-ard", # BK72xx Arduino + "rp2040-ard", # RP2040 Arduino + "nrf52-adafruit", # nRF52 Adafruit + "host", # Host platform (development/testing) + ] + + # Skip base bus components as they're used across many builds + filtered_components = [ + c for c in changed_components if c not in ["i2c", "spi", "uart", "modbus"] + ] + + # Only proceed if exactly one component changed + if len(filtered_components) != 1: + return {"should_run": "false"} + + component = filtered_components[0] + + # Find a test configuration for this component + tests_dir = Path(root_path) / "tests" / "components" / component + + if not tests_dir.exists(): + return {"should_run": "false"} + + # Look for test files + test_files = list(tests_dir.glob("test.*.yaml")) + if not test_files: + return {"should_run": "false"} + + # Try each preferred platform in order + for preferred_platform in PLATFORM_PREFERENCE: + for test_file in test_files: + parts = test_file.stem.split(".") + if len(parts) >= 2: + platform = parts[1] + if platform == preferred_platform: + return { + "should_run": "true", + "component": component, + "test_file": test_file.name, + "platform": platform, + } + + # Fall back to first test file + test_file = test_files[0] + parts = test_file.stem.split(".") + platform = parts[1] if len(parts) >= 2 else "esp32-idf" + return { + "should_run": "true", + "component": component, + "test_file": test_file.name, + "platform": platform, + } + + def main() -> 
None: """Main function that determines which CI jobs to run.""" parser = argparse.ArgumentParser( @@ -247,6 +340,9 @@ def main() -> None: and any(component_test_dir.glob("test.*.yaml")) ] + # Detect single component change for memory impact analysis + memory_impact = detect_single_component_for_memory_impact(changed_components) + # Build output output: dict[str, Any] = { "integration_tests": run_integration, @@ -256,6 +352,7 @@ def main() -> None: "changed_components": changed_components, "changed_components_with_tests": changed_components_with_tests, "component_test_count": len(changed_components_with_tests), + "memory_impact": memory_impact, } # Output as JSON From 3bb95a190dc379cd5c7fcbf4b7b0d96f88e84126 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 14 Oct 2025 13:15:44 -1000 Subject: [PATCH 003/336] fix --- script/determine-jobs.py | 32 +++++++++++++++++++++++--------- 1 file changed, 23 insertions(+), 9 deletions(-) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index 78fd32c3f4..ea43ed71ca 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -220,12 +220,16 @@ def _any_changed_file_endswith(branch: str | None, extensions: tuple[str, ...]) def detect_single_component_for_memory_impact( - changed_components: list[str], + branch: str | None = None, ) -> dict[str, Any]: """Detect if exactly one component changed for memory impact analysis. + This analyzes the actual changed files (not dependencies) to determine if + exactly one component has been modified. This is different from the + changed_components list which includes all dependencies. + Args: - changed_components: List of changed component names + branch: Branch to compare against Returns: Dictionary with memory impact analysis parameters: @@ -257,16 +261,26 @@ def detect_single_component_for_memory_impact( "host", # Host platform (development/testing) ] - # Skip base bus components as they're used across many builds - filtered_components = [ - c for c in changed_components if c not in ["i2c", "spi", "uart", "modbus"] - ] + # Get actually changed files (not dependencies) + files = changed_files(branch) + + # Find all changed components (excluding core) + changed_component_set = set() + + for file in files: + if file.startswith(ESPHOME_COMPONENTS_PATH): + parts = file.split("/") + if len(parts) >= 3: + component = parts[2] + # Skip base bus components as they're used across many builds + if component not in ["i2c", "spi", "uart", "modbus"]: + changed_component_set.add(component) # Only proceed if exactly one component changed - if len(filtered_components) != 1: + if len(changed_component_set) != 1: return {"should_run": "false"} - component = filtered_components[0] + component = list(changed_component_set)[0] # Find a test configuration for this component tests_dir = Path(root_path) / "tests" / "components" / component @@ -341,7 +355,7 @@ def main() -> None: ] # Detect single component change for memory impact analysis - memory_impact = detect_single_component_for_memory_impact(changed_components) + memory_impact = detect_single_component_for_memory_impact(args.branch) # Build output output: dict[str, Any] = { From daa39a489d9d762d31d636c98613f87cf2a5e298 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 14 Oct 2025 13:20:31 -1000 Subject: [PATCH 004/336] fix tests --- tests/script/test_determine_jobs.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/tests/script/test_determine_jobs.py b/tests/script/test_determine_jobs.py index 5d8746f434..9c8b8d39af 100644 --- a/tests/script/test_determine_jobs.py +++ b/tests/script/test_determine_jobs.py @@ -59,12 +59,22 @@ def mock_subprocess_run() -> Generator[Mock, None, None]: yield mock +@pytest.fixture +def mock_changed_files() -> Generator[Mock, None, None]: + """Mock changed_files for memory impact detection.""" + with patch.object(determine_jobs, "changed_files") as mock: + # Default to empty list + mock.return_value = [] + yield mock + + def test_main_all_tests_should_run( mock_should_run_integration_tests: Mock, mock_should_run_clang_tidy: Mock, mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, mock_subprocess_run: Mock, + mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], ) -> None: """Test when all tests should run.""" @@ -98,6 +108,9 @@ def test_main_all_tests_should_run( assert output["component_test_count"] == len( output["changed_components_with_tests"] ) + # memory_impact should be present + assert "memory_impact" in output + assert output["memory_impact"]["should_run"] == "false" # No files changed def test_main_no_tests_should_run( @@ -106,6 +119,7 @@ def test_main_no_tests_should_run( mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, mock_subprocess_run: Mock, + mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], ) -> None: """Test when no tests should run.""" @@ -134,6 +148,9 @@ def test_main_no_tests_should_run( assert output["changed_components"] == [] assert output["changed_components_with_tests"] == [] assert output["component_test_count"] == 0 + # memory_impact should be present + assert "memory_impact" in output + assert output["memory_impact"]["should_run"] == "false" def test_main_list_components_fails( @@ -167,6 +184,7 @@ def test_main_with_branch_argument( mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, mock_subprocess_run: Mock, + mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], ) -> None: """Test with branch argument.""" @@ -212,6 +230,9 @@ def test_main_with_branch_argument( assert output["component_test_count"] == len( output["changed_components_with_tests"] ) + # memory_impact should be present + assert "memory_impact" in output + assert output["memory_impact"]["should_run"] == "false" def test_should_run_integration_tests( @@ -399,6 +420,7 @@ def test_main_filters_components_without_tests( mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, mock_subprocess_run: Mock, + mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], tmp_path: Path, ) -> None: @@ -448,3 +470,6 @@ def test_main_filters_components_without_tests( assert set(output["changed_components_with_tests"]) == {"wifi", "sensor"} # component_test_count should be based on components with tests assert output["component_test_count"] == 2 + # memory_impact should be present + assert "memory_impact" in output + assert output["memory_impact"]["should_run"] == "false" From 5da589abd0db74c20ee33addbc95688d68b5054a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 14 Oct 2025 13:27:13 -1000 Subject: [PATCH 005/336] fix --- tests/script/test_determine_jobs.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/script/test_determine_jobs.py b/tests/script/test_determine_jobs.py index 9c8b8d39af..65eef4f785 100644 --- a/tests/script/test_determine_jobs.py +++ b/tests/script/test_determine_jobs.py @@ -76,8 +76,12 @@ def test_main_all_tests_should_run( mock_subprocess_run: Mock, mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test when all tests should run.""" + # Ensure we're not in GITHUB_ACTIONS mode for this test + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + mock_should_run_integration_tests.return_value = True mock_should_run_clang_tidy.return_value = True mock_should_run_clang_format.return_value = True @@ -121,8 +125,12 @@ def test_main_no_tests_should_run( mock_subprocess_run: Mock, mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test when no tests should run.""" + # Ensure we're not in GITHUB_ACTIONS mode for this test + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + mock_should_run_integration_tests.return_value = False mock_should_run_clang_tidy.return_value = False mock_should_run_clang_format.return_value = False @@ -186,8 +194,12 @@ def test_main_with_branch_argument( mock_subprocess_run: Mock, mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test with branch argument.""" + # Ensure we're not in GITHUB_ACTIONS mode for this test + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + mock_should_run_integration_tests.return_value = False mock_should_run_clang_tidy.return_value = True mock_should_run_clang_format.return_value = False @@ -423,8 +435,12 @@ def test_main_filters_components_without_tests( mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test that components without test files are filtered out.""" + # Ensure we're not in GITHUB_ACTIONS mode for this test + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + mock_should_run_integration_tests.return_value = False mock_should_run_clang_tidy.return_value = False mock_should_run_clang_format.return_value = False From 11f5f7683c51bb28d3c2349230564882847d82e3 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 14 Oct 2025 13:32:21 -1000 Subject: [PATCH 006/336] tidy --- script/ci_memory_impact_comment.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 69f703bd78..af8449aa99 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -115,10 +115,10 @@ def find_existing_comment(pr_number: str) -> str | None: pr_number: PR number Returns: - Comment ID if found, None otherwise + Comment numeric ID (databaseId) if found, None otherwise """ try: - # List all comments on the PR + # List all comments on the PR with both id (node ID) and databaseId (numeric ID) result = subprocess.run( [ "gh", @@ -128,7 +128,7 @@ def find_existing_comment(pr_number: str) -> str | None: "--json", "comments", "--jq", - ".comments[]", + ".comments[] | {id, databaseId, body}", ], capture_output=True, text=True, @@ -143,7 +143,8 @@ def find_existing_comment(pr_number: str) -> str | None: try: comment = json.loads(line) if COMMENT_MARKER in comment.get("body", ""): - return str(comment["id"]) + # Return the numeric databaseId, not the node ID + return str(comment["databaseId"]) except json.JSONDecodeError: continue From 7b6acd3c002d2dc9ecd258f7d8f1f4dcabd6cf3d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 14 Oct 2025 13:33:31 -1000 Subject: [PATCH 007/336] tidy --- script/ci_memory_impact_comment.py | 34 +++++++++++++----------------- 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index af8449aa99..da962efb11 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -18,27 +18,23 @@ COMMENT_MARKER = "" def format_bytes(bytes_value: int) -> str: - """Format bytes value with appropriate unit. + """Format bytes value with comma separators. Args: bytes_value: Number of bytes Returns: - Formatted string (e.g., "1.5 KB", "256 bytes") + Formatted string with comma separators (e.g., "1,234 bytes") """ - if bytes_value < 1024: - return f"{bytes_value} bytes" - if bytes_value < 1024 * 1024: - return f"{bytes_value / 1024:.2f} KB" - return f"{bytes_value / (1024 * 1024):.2f} MB" + return f"{bytes_value:,} bytes" def format_change(before: int, after: int) -> str: """Format memory change with delta and percentage. Args: - before: Memory usage before change - after: Memory usage after change + before: Memory usage before change (in bytes) + after: Memory usage after change (in bytes) Returns: Formatted string with delta and percentage @@ -46,8 +42,16 @@ def format_change(before: int, after: int) -> str: delta = after - before percentage = 0.0 if before == 0 else (delta / before) * 100 - # Format delta with sign - delta_str = f"+{format_bytes(delta)}" if delta >= 0 else format_bytes(delta) + # Format delta with sign and always show in bytes for precision + if delta > 0: + delta_str = f"+{delta:,} bytes" + emoji = "📈" + elif delta < 0: + delta_str = f"{delta:,} bytes" + emoji = "📉" + else: + delta_str = "+0 bytes" + emoji = "➡️" # Format percentage with sign if percentage > 0: @@ -57,14 +61,6 @@ def format_change(before: int, after: int) -> str: else: pct_str = "0.00%" - # Add emoji indicator - if delta > 0: - emoji = "📈" - elif delta < 0: - emoji = "📉" - else: - emoji = "➡️" - return f"{emoji} {delta_str} ({pct_str})" From 354f46f7c0962867727cf296babec08cc7c99a61 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 14 Oct 2025 13:38:41 -1000 Subject: [PATCH 008/336] debug --- script/ci_memory_impact_comment.py | 63 +++++++++++++++++++++++++----- 1 file changed, 54 insertions(+), 9 deletions(-) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index da962efb11..804e369efc 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -114,6 +114,10 @@ def find_existing_comment(pr_number: str) -> str | None: Comment numeric ID (databaseId) if found, None otherwise """ try: + print( + f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr + ) + # List all comments on the PR with both id (node ID) and databaseId (numeric ID) result = subprocess.run( [ @@ -131,23 +135,46 @@ def find_existing_comment(pr_number: str) -> str | None: check=True, ) + print(f"DEBUG: gh pr view output:\n{result.stdout}", file=sys.stderr) + # Parse comments and look for our marker + comment_count = 0 for line in result.stdout.strip().split("\n"): if not line: continue try: comment = json.loads(line) - if COMMENT_MARKER in comment.get("body", ""): + comment_count += 1 + print( + f"DEBUG: Checking comment {comment_count}: id={comment.get('id')}, databaseId={comment.get('databaseId')}", + file=sys.stderr, + ) + + body = comment.get("body", "") + if COMMENT_MARKER in body: + database_id = str(comment["databaseId"]) + print( + f"DEBUG: Found existing comment with databaseId={database_id}", + file=sys.stderr, + ) # Return the numeric databaseId, not the node ID - return str(comment["databaseId"]) - except json.JSONDecodeError: + return database_id + print("DEBUG: Comment does not contain marker", file=sys.stderr) + except json.JSONDecodeError as e: + print(f"DEBUG: JSON decode error: {e}", file=sys.stderr) continue + print( + f"DEBUG: No existing comment found (checked {comment_count} comments)", + file=sys.stderr, + ) return None except subprocess.CalledProcessError as e: print(f"Error finding existing comment: {e}", file=sys.stderr) + if e.stderr: + print(f"stderr: {e.stderr.decode()}", file=sys.stderr) return None @@ -165,10 +192,13 @@ def post_or_update_comment(pr_number: str, comment_body: str) -> bool: existing_comment_id = find_existing_comment(pr_number) try: - if existing_comment_id: + if existing_comment_id and existing_comment_id != "None": # Update existing comment - print(f"Updating existing comment {existing_comment_id}", file=sys.stderr) - subprocess.run( + print( + f"DEBUG: Updating existing comment {existing_comment_id}", + file=sys.stderr, + ) + result = subprocess.run( [ "gh", "api", @@ -180,15 +210,22 @@ def post_or_update_comment(pr_number: str, comment_body: str) -> bool: ], check=True, capture_output=True, + text=True, ) + print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr) else: # Post new comment - print("Posting new comment", file=sys.stderr) - subprocess.run( + print( + f"DEBUG: Posting new comment (existing_comment_id={existing_comment_id})", + file=sys.stderr, + ) + result = subprocess.run( ["gh", "pr", "comment", pr_number, "--body", comment_body], check=True, capture_output=True, + text=True, ) + print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr) print("Comment posted/updated successfully", file=sys.stderr) return True @@ -196,7 +233,15 @@ def post_or_update_comment(pr_number: str, comment_body: str) -> bool: except subprocess.CalledProcessError as e: print(f"Error posting/updating comment: {e}", file=sys.stderr) if e.stderr: - print(f"stderr: {e.stderr.decode()}", 
file=sys.stderr) + print( + f"stderr: {e.stderr.decode() if isinstance(e.stderr, bytes) else e.stderr}", + file=sys.stderr, + ) + if e.stdout: + print( + f"stdout: {e.stdout.decode() if isinstance(e.stdout, bytes) else e.stdout}", + file=sys.stderr, + ) return False From 8e6ee2bed18a1fd5ca5ddd818d59f71a5f86561e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 14 Oct 2025 13:43:58 -1000 Subject: [PATCH 009/336] debug --- script/ci_memory_impact_comment.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 804e369efc..e3e70d601f 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -111,31 +111,31 @@ def find_existing_comment(pr_number: str) -> str | None: pr_number: PR number Returns: - Comment numeric ID (databaseId) if found, None otherwise + Comment numeric ID if found, None otherwise """ try: print( f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr ) - # List all comments on the PR with both id (node ID) and databaseId (numeric ID) + # Use gh api to get comments directly - this returns the numeric id field result = subprocess.run( [ "gh", - "pr", - "view", - pr_number, - "--json", - "comments", + "api", + f"/repos/{{owner}}/{{repo}}/issues/{pr_number}/comments", "--jq", - ".comments[] | {id, databaseId, body}", + ".[] | {id, body}", ], capture_output=True, text=True, check=True, ) - print(f"DEBUG: gh pr view output:\n{result.stdout}", file=sys.stderr) + print( + f"DEBUG: gh api comments output (first 500 chars):\n{result.stdout[:500]}", + file=sys.stderr, + ) # Parse comments and look for our marker comment_count = 0 @@ -146,20 +146,20 @@ def find_existing_comment(pr_number: str) -> str | None: try: comment = json.loads(line) comment_count += 1 + comment_id = comment.get("id") print( - f"DEBUG: Checking comment {comment_count}: id={comment.get('id')}, databaseId={comment.get('databaseId')}", + f"DEBUG: Checking comment {comment_count}: id={comment_id}", file=sys.stderr, ) body = comment.get("body", "") if COMMENT_MARKER in body: - database_id = str(comment["databaseId"]) print( - f"DEBUG: Found existing comment with databaseId={database_id}", + f"DEBUG: Found existing comment with id={comment_id}", file=sys.stderr, ) - # Return the numeric databaseId, not the node ID - return database_id + # Return the numeric id + return str(comment_id) print("DEBUG: Comment does not contain marker", file=sys.stderr) except json.JSONDecodeError as e: print(f"DEBUG: JSON decode error: {e}", file=sys.stderr) From 3ba2212cfc22d7369a11f91035fc7476889b323f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 16 Oct 2025 06:01:32 -1000 Subject: [PATCH 010/336] fix flakey --- .../test_sensor_filters_ring_buffer.py | 16 +++++-- .../test_sensor_filters_sliding_window.py | 48 +++++++++++++++---- 2 files changed, 52 insertions(+), 12 deletions(-) diff --git a/tests/integration/test_sensor_filters_ring_buffer.py b/tests/integration/test_sensor_filters_ring_buffer.py index 8edb1600d9..da4862c14b 100644 --- a/tests/integration/test_sensor_filters_ring_buffer.py +++ b/tests/integration/test_sensor_filters_ring_buffer.py @@ -8,6 +8,7 @@ from aioesphomeapi import EntityState, SensorState import pytest from .sensor_test_utils import build_key_to_sensor_mapping +from .state_utils import InitialStateHelper from .types import APIClientConnectedFactory, RunCompiledFunction @@ -36,7 +37,7 @@ async def test_sensor_filters_ring_buffer( if not isinstance(state, SensorState): return - # Skip NaN values (initial states) + # Skip NaN values if state.missing_state: return @@ -76,8 +77,17 @@ async def test_sensor_filters_ring_buffer( ], ) - # Subscribe to state changes AFTER building mapping - client.subscribe_states(on_state) + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial states to be sent before pressing button + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial states") # Find the publish button publish_button = next( diff --git a/tests/integration/test_sensor_filters_sliding_window.py b/tests/integration/test_sensor_filters_sliding_window.py index 2183946134..389cbf2659 100644 --- a/tests/integration/test_sensor_filters_sliding_window.py +++ b/tests/integration/test_sensor_filters_sliding_window.py @@ -8,6 +8,7 @@ from aioesphomeapi import EntityState, SensorState import pytest from .sensor_test_utils import build_key_to_sensor_mapping +from .state_utils import InitialStateHelper from .types import APIClientConnectedFactory, RunCompiledFunction @@ -41,7 +42,7 @@ async def test_sensor_filters_sliding_window( if not isinstance(state, SensorState): return - # Skip NaN values (initial states) + # Skip NaN values if state.missing_state: return @@ -108,8 +109,17 @@ async def test_sensor_filters_sliding_window( ], ) - # Subscribe to state changes AFTER building mapping - client.subscribe_states(on_state) + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial states to be sent before pressing button + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial states") # Find the publish button publish_button = next( @@ -207,11 +217,12 @@ async def test_sensor_filters_nan_handling( if not isinstance(state, SensorState): return - # Skip NaN values (initial states) + # Skip NaN values if state.missing_state: return sensor_name = key_to_sensor.get(state.key) + if sensor_name == "min_nan": min_states.append(state.state) elif sensor_name == "max_nan": @@ -236,8 +247,17 @@ async def test_sensor_filters_nan_handling( # Build key-to-sensor mapping key_to_sensor = build_key_to_sensor_mapping(entities, ["min_nan", "max_nan"]) - # Subscribe to state changes AFTER building mapping - 
client.subscribe_states(on_state) + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial states + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial states") # Find the publish button publish_button = next( @@ -305,11 +325,12 @@ async def test_sensor_filters_ring_buffer_wraparound( if not isinstance(state, SensorState): return - # Skip NaN values (initial states) + # Skip NaN values if state.missing_state: return sensor_name = key_to_sensor.get(state.key) + if sensor_name == "wraparound_min": min_states.append(state.state) # With batch_delay: 0ms, we should receive all 3 outputs @@ -326,8 +347,17 @@ async def test_sensor_filters_ring_buffer_wraparound( # Build key-to-sensor mapping key_to_sensor = build_key_to_sensor_mapping(entities, ["wraparound_min"]) - # Subscribe to state changes AFTER building mapping - client.subscribe_states(on_state) + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial state + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial state") # Find the publish button publish_button = next( From 44ad787cb3da66ab61190abb27450abf8d990672 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 16 Oct 2025 06:04:42 -1000 Subject: [PATCH 011/336] fix flakey --- tests/integration/state_utils.py | 146 ++++++++++++++++++ .../test_sensor_filters_ring_buffer.py | 40 ++--- .../test_sensor_filters_sliding_window.py | 40 ++--- 3 files changed, 186 insertions(+), 40 deletions(-) create mode 100644 tests/integration/state_utils.py diff --git a/tests/integration/state_utils.py b/tests/integration/state_utils.py new file mode 100644 index 0000000000..7392393501 --- /dev/null +++ b/tests/integration/state_utils.py @@ -0,0 +1,146 @@ +"""Shared utilities for ESPHome integration tests - state handling.""" + +from __future__ import annotations + +import asyncio +import logging + +from aioesphomeapi import ButtonInfo, EntityInfo, EntityState + +_LOGGER = logging.getLogger(__name__) + + +class InitialStateHelper: + """Helper to wait for initial states before processing test states. + + When an API client connects, ESPHome sends the current state of all entities. + This helper wraps the user's state callback and swallows the first state for + each entity, then forwards all subsequent states to the user callback. + + Usage: + entities, services = await client.list_entities_services() + helper = InitialStateHelper(entities) + client.subscribe_states(helper.on_state_wrapper(user_callback)) + await helper.wait_for_initial_states() + """ + + def __init__(self, entities: list[EntityInfo]) -> None: + """Initialize the helper. 
+ + Args: + entities: All entities from list_entities_services() + """ + # Set of (device_id, key) tuples waiting for initial state + # Buttons are stateless, so exclude them + self._wait_initial_states = { + (entity.device_id, entity.key) + for entity in entities + if not isinstance(entity, ButtonInfo) + } + # Keep entity info for debugging - use (device_id, key) tuple + self._entities_by_id = { + (entity.device_id, entity.key): entity for entity in entities + } + + # Log all entities + _LOGGER.debug( + "InitialStateHelper: Found %d total entities: %s", + len(entities), + [(type(e).__name__, e.object_id) for e in entities], + ) + + # Log which ones we're waiting for + _LOGGER.debug( + "InitialStateHelper: Waiting for %d entities (excluding ButtonInfo): %s", + len(self._wait_initial_states), + [self._entities_by_id[k].object_id for k in self._wait_initial_states], + ) + + # Log which ones we're NOT waiting for + not_waiting = { + (e.device_id, e.key) for e in entities + } - self._wait_initial_states + _LOGGER.debug( + "InitialStateHelper: NOT waiting for %d entities: %s", + len(not_waiting), + [ + ( + type(self._entities_by_id[k]).__name__, + self._entities_by_id[k].object_id, + ) + for k in not_waiting + ], + ) + + # Create future in the running event loop + self._initial_states_received = asyncio.get_running_loop().create_future() + # If no entities to wait for, mark complete immediately + if not self._wait_initial_states: + self._initial_states_received.set_result(True) + + def on_state_wrapper(self, user_callback): + """Wrap a user callback to track initial states. + + Args: + user_callback: The user's state callback function + + Returns: + Wrapped callback that swallows first state per entity, forwards rest + """ + + def wrapper(state: EntityState) -> None: + """Swallow initial state per entity, forward subsequent states.""" + # Create entity identifier tuple + entity_id = (state.device_id, state.key) + + # Log which entity is sending state + if entity_id in self._entities_by_id: + entity = self._entities_by_id[entity_id] + _LOGGER.debug( + "Received state for %s (type: %s, device_id: %s, key: %d)", + entity.object_id, + type(entity).__name__, + state.device_id, + state.key, + ) + + # If this entity is waiting for initial state + if entity_id in self._wait_initial_states: + # Remove from waiting set + self._wait_initial_states.discard(entity_id) + + _LOGGER.debug( + "Swallowed initial state for %s, %d entities remaining", + self._entities_by_id[entity_id].object_id + if entity_id in self._entities_by_id + else entity_id, + len(self._wait_initial_states), + ) + + # Check if we've now seen all entities + if ( + not self._wait_initial_states + and not self._initial_states_received.done() + ): + _LOGGER.debug("All initial states received") + self._initial_states_received.set_result(True) + + # Don't forward initial state to user + return + + # Forward subsequent states to user callback + _LOGGER.debug("Forwarding state to user callback") + user_callback(state) + + return wrapper + + async def wait_for_initial_states(self, timeout: float = 5.0) -> None: + """Wait for all initial states to be received. 
+ + Args: + timeout: Maximum time to wait in seconds + + Raises: + asyncio.TimeoutError: If initial states aren't received within timeout + """ + await asyncio.wait_for(self._initial_states_received, timeout=timeout) diff --git a/tests/integration/test_sensor_filters_ring_buffer.py b/tests/integration/test_sensor_filters_ring_buffer.py index da4862c14b..5d00986cc2 100644 --- a/tests/integration/test_sensor_filters_ring_buffer.py +++ b/tests/integration/test_sensor_filters_ring_buffer.py @@ -122,31 +122,31 @@ async def test_sensor_filters_ring_buffer( # Verify the values at each output position # Position 1: window=[1] - assert abs(sensor_states["sliding_min"][0] - 1.0) < 0.01 - assert abs(sensor_states["sliding_max"][0] - 1.0) < 0.01 - assert abs(sensor_states["sliding_median"][0] - 1.0) < 0.01 - assert abs(sensor_states["sliding_moving_avg"][0] - 1.0) < 0.01 + assert sensor_states["sliding_min"][0] == pytest.approx(1.0) + assert sensor_states["sliding_max"][0] == pytest.approx(1.0) + assert sensor_states["sliding_median"][0] == pytest.approx(1.0) + assert sensor_states["sliding_moving_avg"][0] == pytest.approx(1.0) # Position 3: window=[1,2,3] - assert abs(sensor_states["sliding_min"][1] - 1.0) < 0.01 - assert abs(sensor_states["sliding_max"][1] - 3.0) < 0.01 - assert abs(sensor_states["sliding_median"][1] - 2.0) < 0.01 - assert abs(sensor_states["sliding_moving_avg"][1] - 2.0) < 0.01 + assert sensor_states["sliding_min"][1] == pytest.approx(1.0) + assert sensor_states["sliding_max"][1] == pytest.approx(3.0) + assert sensor_states["sliding_median"][1] == pytest.approx(2.0) + assert sensor_states["sliding_moving_avg"][1] == pytest.approx(2.0) # Position 5: window=[1,2,3,4,5] - assert abs(sensor_states["sliding_min"][2] - 1.0) < 0.01 - assert abs(sensor_states["sliding_max"][2] - 5.0) < 0.01 - assert abs(sensor_states["sliding_median"][2] - 3.0) < 0.01 - assert abs(sensor_states["sliding_moving_avg"][2] - 3.0) < 0.01 + assert sensor_states["sliding_min"][2] == pytest.approx(1.0) + assert sensor_states["sliding_max"][2] == pytest.approx(5.0) + assert sensor_states["sliding_median"][2] == pytest.approx(3.0) + assert sensor_states["sliding_moving_avg"][2] == pytest.approx(3.0) # Position 7: window=[3,4,5,6,7] (ring buffer wrapped) - assert abs(sensor_states["sliding_min"][3] - 3.0) < 0.01 - assert abs(sensor_states["sliding_max"][3] - 7.0) < 0.01 - assert abs(sensor_states["sliding_median"][3] - 5.0) < 0.01 - assert abs(sensor_states["sliding_moving_avg"][3] - 5.0) < 0.01 + assert sensor_states["sliding_min"][3] == pytest.approx(3.0) + assert sensor_states["sliding_max"][3] == pytest.approx(7.0) + assert sensor_states["sliding_median"][3] == pytest.approx(5.0) + assert sensor_states["sliding_moving_avg"][3] == pytest.approx(5.0) # Position 9: window=[5,6,7,8,9] (ring buffer wrapped) - assert abs(sensor_states["sliding_min"][4] - 5.0) < 0.01 - assert abs(sensor_states["sliding_max"][4] - 9.0) < 0.01 - assert abs(sensor_states["sliding_median"][4] - 7.0) < 0.01 - assert abs(sensor_states["sliding_moving_avg"][4] - 7.0) < 0.01 + assert sensor_states["sliding_min"][4] == pytest.approx(5.0) + assert sensor_states["sliding_max"][4] == pytest.approx(9.0) + assert sensor_states["sliding_median"][4] == pytest.approx(7.0) + assert sensor_states["sliding_moving_avg"][4] == pytest.approx(7.0) diff --git a/tests/integration/test_sensor_filters_sliding_window.py b/tests/integration/test_sensor_filters_sliding_window.py index 389cbf2659..57ab65acd4 100644 --- 
a/tests/integration/test_sensor_filters_sliding_window.py +++ b/tests/integration/test_sensor_filters_sliding_window.py @@ -58,33 +58,33 @@ async def test_sensor_filters_sliding_window( # Filters send at position 1 and position 6 (send_every=5 means every 5th value after first) if ( sensor_name == "min_sensor" - and abs(state.state - 2.0) < 0.01 + and state.state == pytest.approx(2.0) and not min_received.done() ): min_received.set_result(True) elif ( sensor_name == "max_sensor" - and abs(state.state - 6.0) < 0.01 + and state.state == pytest.approx(6.0) and not max_received.done() ): max_received.set_result(True) elif ( sensor_name == "median_sensor" - and abs(state.state - 4.0) < 0.01 + and state.state == pytest.approx(4.0) and not median_received.done() ): # Median of [2, 3, 4, 5, 6] = 4 median_received.set_result(True) elif ( sensor_name == "quantile_sensor" - and abs(state.state - 6.0) < 0.01 + and state.state == pytest.approx(6.0) and not quantile_received.done() ): # 90th percentile of [2, 3, 4, 5, 6] = 6 quantile_received.set_result(True) elif ( sensor_name == "moving_avg_sensor" - and abs(state.state - 4.0) < 0.01 + and state.state == pytest.approx(4.0) and not moving_avg_received.done() ): # Average of [2, 3, 4, 5, 6] = 4 @@ -168,30 +168,30 @@ async def test_sensor_filters_sliding_window( assert len(sensor_states["moving_avg_sensor"]) == 2 # Verify the first output (after 1 value: [1]) - assert abs(sensor_states["min_sensor"][0] - 1.0) < 0.01, ( + assert sensor_states["min_sensor"][0] == pytest.approx(1.0), ( f"First min should be 1.0, got {sensor_states['min_sensor'][0]}" ) - assert abs(sensor_states["max_sensor"][0] - 1.0) < 0.01, ( + assert sensor_states["max_sensor"][0] == pytest.approx(1.0), ( f"First max should be 1.0, got {sensor_states['max_sensor'][0]}" ) - assert abs(sensor_states["median_sensor"][0] - 1.0) < 0.01, ( + assert sensor_states["median_sensor"][0] == pytest.approx(1.0), ( f"First median should be 1.0, got {sensor_states['median_sensor'][0]}" ) - assert abs(sensor_states["moving_avg_sensor"][0] - 1.0) < 0.01, ( + assert sensor_states["moving_avg_sensor"][0] == pytest.approx(1.0), ( f"First moving avg should be 1.0, got {sensor_states['moving_avg_sensor'][0]}" ) # Verify the second output (after 6 values, window has [2, 3, 4, 5, 6]) - assert abs(sensor_states["min_sensor"][1] - 2.0) < 0.01, ( + assert sensor_states["min_sensor"][1] == pytest.approx(2.0), ( f"Second min should be 2.0, got {sensor_states['min_sensor'][1]}" ) - assert abs(sensor_states["max_sensor"][1] - 6.0) < 0.01, ( + assert sensor_states["max_sensor"][1] == pytest.approx(6.0), ( f"Second max should be 6.0, got {sensor_states['max_sensor'][1]}" ) - assert abs(sensor_states["median_sensor"][1] - 4.0) < 0.01, ( + assert sensor_states["median_sensor"][1] == pytest.approx(4.0), ( f"Second median should be 4.0, got {sensor_states['median_sensor'][1]}" ) - assert abs(sensor_states["moving_avg_sensor"][1] - 4.0) < 0.01, ( + assert sensor_states["moving_avg_sensor"][1] == pytest.approx(4.0), ( f"Second moving avg should be 4.0, got {sensor_states['moving_avg_sensor'][1]}" ) @@ -291,18 +291,18 @@ async def test_sensor_filters_nan_handling( ) # First output - assert abs(min_states[0] - 10.0) < 0.01, ( + assert min_states[0] == pytest.approx(10.0), ( f"First min should be 10.0, got {min_states[0]}" ) - assert abs(max_states[0] - 10.0) < 0.01, ( + assert max_states[0] == pytest.approx(10.0), ( f"First max should be 10.0, got {max_states[0]}" ) # Second output - verify NaN values were ignored - assert 
abs(min_states[1] - 5.0) < 0.01, ( + assert min_states[1] == pytest.approx(5.0), ( f"Second min should ignore NaN and return 5.0, got {min_states[1]}" ) - assert abs(max_states[1] - 15.0) < 0.01, ( + assert max_states[1] == pytest.approx(15.0), ( f"Second max should ignore NaN and return 15.0, got {max_states[1]}" ) @@ -385,12 +385,12 @@ async def test_sensor_filters_ring_buffer_wraparound( assert len(min_states) == 3, ( f"Should have 3 states, got {len(min_states)}: {min_states}" ) - assert abs(min_states[0] - 10.0) < 0.01, ( + assert min_states[0] == pytest.approx(10.0), ( f"First min should be 10.0, got {min_states[0]}" ) - assert abs(min_states[1] - 5.0) < 0.01, ( + assert min_states[1] == pytest.approx(5.0), ( f"Second min should be 5.0, got {min_states[1]}" ) - assert abs(min_states[2] - 15.0) < 0.01, ( + assert min_states[2] == pytest.approx(15.0), ( f"Third min should be 15.0, got {min_states[2]}" ) From 0200d7c358a4b8a23db1361d7aabe7c510ad536b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 16 Oct 2025 06:05:39 -1000 Subject: [PATCH 012/336] fix flakey --- tests/integration/README.md | 76 +++++++++++++++++++++++++++++++++++++ 1 file changed, 76 insertions(+) diff --git a/tests/integration/README.md b/tests/integration/README.md index 8fce81bb80..a2ffb1358b 100644 --- a/tests/integration/README.md +++ b/tests/integration/README.md @@ -7,6 +7,8 @@ This directory contains end-to-end integration tests for ESPHome, focusing on te - `conftest.py` - Common fixtures and utilities - `const.py` - Constants used throughout the integration tests - `types.py` - Type definitions for fixtures and functions +- `state_utils.py` - State handling utilities (e.g., `InitialStateHelper`) +- `sensor_test_utils.py` - Sensor-specific test utilities - `fixtures/` - YAML configuration files for tests - `test_*.py` - Individual test files @@ -26,6 +28,32 @@ The `yaml_config` fixture automatically loads YAML configurations based on the t - `reserved_tcp_port` - Reserves a TCP port by holding the socket open until ESPHome needs it - `unused_tcp_port` - Provides the reserved port number for each test +### Helper Utilities + +#### InitialStateHelper (`state_utils.py`) + +The `InitialStateHelper` class solves a common problem in integration tests: when an API client connects, ESPHome automatically broadcasts the current state of all entities. This can interfere with tests that want to track only new state changes triggered by test actions. + +**What it does:** +- Tracks all entities (except stateless ones like buttons) +- Swallows the first state broadcast for each entity +- Forwards all subsequent state changes to your test callback +- Provides `wait_for_initial_states()` to synchronize before test actions + +**When to use it:** +- Any test that triggers entity state changes and needs to verify them +- Tests that would otherwise see duplicate or unexpected states +- Tests that need clean separation between initial state and test-triggered changes + +**Implementation details:** +- Uses `(device_id, key)` tuples to uniquely identify entities across devices +- Automatically excludes `ButtonInfo` entities (stateless) +- Provides debug logging to track state reception (use `--log-cli-level=DEBUG`) +- Safe for concurrent use with multiple entity types + +**Future work:** +Consider converting existing integration tests to use `InitialStateHelper` for more reliable state tracking and to eliminate race conditions related to initial state broadcasts. 
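+
+For reference, the implementation-details bullets above boil down to a small amount of bookkeeping. The condensed sketch below illustrates the idea only; the class and method names are illustrative, not the actual `state_utils.py` API, and it assumes the same `aioesphomeapi` types these tests already import:
+
+```python
+from __future__ import annotations
+
+import asyncio
+
+from aioesphomeapi import ButtonInfo, EntityInfo, EntityState
+
+
+class MiniInitialStateFilter:
+    """Illustrative sketch: swallow the first state per entity, forward the rest."""
+
+    def __init__(self, entities: list[EntityInfo]) -> None:
+        # Buttons are stateless, so they never broadcast an initial state
+        self._pending = {
+            (e.device_id, e.key) for e in entities if not isinstance(e, ButtonInfo)
+        }
+        self._all_seen = asyncio.get_running_loop().create_future()
+        if not self._pending:
+            self._all_seen.set_result(True)
+
+    def wrap(self, user_callback):
+        def _on_state(state: EntityState) -> None:
+            ident = (state.device_id, state.key)
+            if ident in self._pending:
+                self._pending.discard(ident)  # swallow the initial broadcast
+                if not self._pending and not self._all_seen.done():
+                    self._all_seen.set_result(True)
+                return
+            user_callback(state)  # only test-triggered changes reach the test
+
+        return _on_state
+
+    async def wait(self, timeout: float = 5.0) -> None:
+        await asyncio.wait_for(self._all_seen, timeout=timeout)
+```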
+ ### Writing Tests The simplest way to write a test is to use the `run_compiled` and `api_client_connected` fixtures: @@ -125,6 +153,54 @@ async def test_my_sensor( ``` ##### State Subscription Pattern + +**Recommended: Using InitialStateHelper** + +When an API client connects, ESPHome automatically sends the current state of all entities. The `InitialStateHelper` (from `state_utils.py`) handles this by swallowing these initial states and only forwarding subsequent state changes to your test callback: + +```python +from .state_utils import InitialStateHelper + +# Track state changes with futures +loop = asyncio.get_running_loop() +states: dict[int, EntityState] = {} +state_future: asyncio.Future[EntityState] = loop.create_future() + +def on_state(state: EntityState) -> None: + """This callback only receives NEW state changes, not initial states.""" + states[state.key] = state + # Check for specific condition using isinstance + if isinstance(state, SensorState) and state.state == expected_value: + if not state_future.done(): + state_future.set_result(state) + +# Get entities and set up state synchronization +entities, services = await client.list_entities_services() +initial_state_helper = InitialStateHelper(entities) + +# Subscribe with the wrapper that filters initial states +client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + +# Wait for all initial states to be broadcast +try: + await initial_state_helper.wait_for_initial_states() +except TimeoutError: + pytest.fail("Timeout waiting for initial states") + +# Now perform your test actions - on_state will only receive new changes +# ... trigger state changes ... + +# Wait for expected state +try: + result = await asyncio.wait_for(state_future, timeout=5.0) +except asyncio.TimeoutError: + pytest.fail(f"Expected state not received. Got: {list(states.values())}") +``` + +**Legacy: Manual State Tracking** + +If you need to handle initial states manually (not recommended for new tests): + ```python # Track state changes with futures loop = asyncio.get_running_loop() From b5c4dc13e010f2f7d9b37dec91aed88f254b8217 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 16 Oct 2025 06:07:41 -1000 Subject: [PATCH 013/336] fix flakey --- tests/integration/state_utils.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/tests/integration/state_utils.py b/tests/integration/state_utils.py index 7392393501..5f34bb61d4 100644 --- a/tests/integration/state_utils.py +++ b/tests/integration/state_utils.py @@ -60,17 +60,16 @@ class InitialStateHelper: not_waiting = { (e.device_id, e.key) for e in entities } - self._wait_initial_states - _LOGGER.debug( - "InitialStateHelper: NOT waiting for %d entities: %s", - len(not_waiting), - [ - ( - type(self._entities_by_id[k]).__name__, - self._entities_by_id[k].object_id, - ) + if not_waiting: + not_waiting_info = [ + f"{type(self._entities_by_id[k]).__name__}:{self._entities_by_id[k].object_id}" for k in not_waiting - ], - ) + ] + _LOGGER.debug( + "InitialStateHelper: NOT waiting for %d entities: %s", + len(not_waiting), + not_waiting_info, + ) # Create future in the running event loop self._initial_states_received = asyncio.get_running_loop().create_future() From 7be04916acb1750e1b23817ae0488a1c23851d4e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 16 Oct 2025 06:09:38 -1000 Subject: [PATCH 014/336] fix flakey --- tests/integration/README.md | 2 +- .../{sensor_test_utils.py => sensor_utils.py} | 0 tests/integration/state_utils.py | 22 +++++++++++++++++++ .../test_sensor_filters_ring_buffer.py | 5 ++--- .../test_sensor_filters_sliding_window.py | 9 ++++---- 5 files changed, 29 insertions(+), 9 deletions(-) rename tests/integration/{sensor_test_utils.py => sensor_utils.py} (100%) diff --git a/tests/integration/README.md b/tests/integration/README.md index a2ffb1358b..11c33fc5db 100644 --- a/tests/integration/README.md +++ b/tests/integration/README.md @@ -8,7 +8,7 @@ This directory contains end-to-end integration tests for ESPHome, focusing on te - `const.py` - Constants used throughout the integration tests - `types.py` - Type definitions for fixtures and functions - `state_utils.py` - State handling utilities (e.g., `InitialStateHelper`) -- `sensor_test_utils.py` - Sensor-specific test utilities +- `sensor_utils.py` - Sensor-specific test utilities - `fixtures/` - YAML configuration files for tests - `test_*.py` - Individual test files diff --git a/tests/integration/sensor_test_utils.py b/tests/integration/sensor_utils.py similarity index 100% rename from tests/integration/sensor_test_utils.py rename to tests/integration/sensor_utils.py diff --git a/tests/integration/state_utils.py b/tests/integration/state_utils.py index 5f34bb61d4..58d6d2790f 100644 --- a/tests/integration/state_utils.py +++ b/tests/integration/state_utils.py @@ -10,6 +10,28 @@ from aioesphomeapi import ButtonInfo, EntityInfo, EntityState _LOGGER = logging.getLogger(__name__) +def build_key_to_entity_mapping( + entities: list[EntityInfo], entity_names: list[str] +) -> dict[int, str]: + """Build a mapping from entity keys to entity names. + + Args: + entities: List of entity info objects from the API + entity_names: List of entity names to search for in object_ids + + Returns: + Dictionary mapping entity keys to entity names + """ + key_to_entity: dict[int, str] = {} + for entity in entities: + obj_id = entity.object_id.lower() + for entity_name in entity_names: + if entity_name in obj_id: + key_to_entity[entity.key] = entity_name + break + return key_to_entity + + class InitialStateHelper: """Helper to wait for initial states before processing test states. 
diff --git a/tests/integration/test_sensor_filters_ring_buffer.py b/tests/integration/test_sensor_filters_ring_buffer.py index 5d00986cc2..c8be8edce0 100644 --- a/tests/integration/test_sensor_filters_ring_buffer.py +++ b/tests/integration/test_sensor_filters_ring_buffer.py @@ -7,8 +7,7 @@ import asyncio from aioesphomeapi import EntityState, SensorState import pytest -from .sensor_test_utils import build_key_to_sensor_mapping -from .state_utils import InitialStateHelper +from .state_utils import InitialStateHelper, build_key_to_entity_mapping from .types import APIClientConnectedFactory, RunCompiledFunction @@ -67,7 +66,7 @@ async def test_sensor_filters_ring_buffer( entities, services = await client.list_entities_services() # Build key-to-sensor mapping - key_to_sensor = build_key_to_sensor_mapping( + key_to_sensor = build_key_to_entity_mapping( entities, [ "sliding_min", diff --git a/tests/integration/test_sensor_filters_sliding_window.py b/tests/integration/test_sensor_filters_sliding_window.py index 57ab65acd4..b0688a6536 100644 --- a/tests/integration/test_sensor_filters_sliding_window.py +++ b/tests/integration/test_sensor_filters_sliding_window.py @@ -7,8 +7,7 @@ import asyncio from aioesphomeapi import EntityState, SensorState import pytest -from .sensor_test_utils import build_key_to_sensor_mapping -from .state_utils import InitialStateHelper +from .state_utils import InitialStateHelper, build_key_to_entity_mapping from .types import APIClientConnectedFactory, RunCompiledFunction @@ -98,7 +97,7 @@ async def test_sensor_filters_sliding_window( entities, services = await client.list_entities_services() # Build key-to-sensor mapping - key_to_sensor = build_key_to_sensor_mapping( + key_to_sensor = build_key_to_entity_mapping( entities, [ "min_sensor", @@ -245,7 +244,7 @@ async def test_sensor_filters_nan_handling( entities, services = await client.list_entities_services() # Build key-to-sensor mapping - key_to_sensor = build_key_to_sensor_mapping(entities, ["min_nan", "max_nan"]) + key_to_sensor = build_key_to_entity_mapping(entities, ["min_nan", "max_nan"]) # Set up initial state helper with all entities initial_state_helper = InitialStateHelper(entities) @@ -345,7 +344,7 @@ async def test_sensor_filters_ring_buffer_wraparound( entities, services = await client.list_entities_services() # Build key-to-sensor mapping - key_to_sensor = build_key_to_sensor_mapping(entities, ["wraparound_min"]) + key_to_sensor = build_key_to_entity_mapping(entities, ["wraparound_min"]) # Set up initial state helper with all entities initial_state_helper = InitialStateHelper(entities) From 0cff6acdf4a7b8f8ab763535f57c3c9d1147e194 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 16 Oct 2025 06:09:44 -1000 Subject: [PATCH 015/336] fix flakey --- tests/integration/sensor_utils.py | 27 --------------------------- 1 file changed, 27 deletions(-) delete mode 100644 tests/integration/sensor_utils.py diff --git a/tests/integration/sensor_utils.py b/tests/integration/sensor_utils.py deleted file mode 100644 index c3843a26ab..0000000000 --- a/tests/integration/sensor_utils.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Shared utilities for sensor integration tests.""" - -from __future__ import annotations - -from aioesphomeapi import EntityInfo - - -def build_key_to_sensor_mapping( - entities: list[EntityInfo], sensor_names: list[str] -) -> dict[int, str]: - """Build a mapping from entity keys to sensor names. 
- - Args: - entities: List of entity info objects from the API - sensor_names: List of sensor names to search for in object_ids - - Returns: - Dictionary mapping entity keys to sensor names - """ - key_to_sensor: dict[int, str] = {} - for entity in entities: - obj_id = entity.object_id.lower() - for sensor_name in sensor_names: - if sensor_name in obj_id: - key_to_sensor[entity.key] = sensor_name - break - return key_to_sensor From 1118ef32c3437f29e8c8ac02dcc6885437b279d4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 16 Oct 2025 06:16:37 -1000 Subject: [PATCH 016/336] preen --- tests/integration/README.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/integration/README.md b/tests/integration/README.md index 11c33fc5db..2a6b6fe564 100644 --- a/tests/integration/README.md +++ b/tests/integration/README.md @@ -7,8 +7,7 @@ This directory contains end-to-end integration tests for ESPHome, focusing on te - `conftest.py` - Common fixtures and utilities - `const.py` - Constants used throughout the integration tests - `types.py` - Type definitions for fixtures and functions -- `state_utils.py` - State handling utilities (e.g., `InitialStateHelper`) -- `sensor_utils.py` - Sensor-specific test utilities +- `state_utils.py` - State handling utilities (e.g., `InitialStateHelper`, `build_key_to_entity_mapping`) - `fixtures/` - YAML configuration files for tests - `test_*.py` - Individual test files From acfa325f23c9df37471392889866cf29eb2c7d83 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 13:22:01 -1000 Subject: [PATCH 017/336] merge --- esphome/analyze_memory.py | 1630 +++++++++++++++++++++++++++++++++++++ 1 file changed, 1630 insertions(+) create mode 100644 esphome/analyze_memory.py diff --git a/esphome/analyze_memory.py b/esphome/analyze_memory.py new file mode 100644 index 0000000000..70c324b33f --- /dev/null +++ b/esphome/analyze_memory.py @@ -0,0 +1,1630 @@ +"""Memory usage analyzer for ESPHome compiled binaries.""" + +from collections import defaultdict +import json +import logging +from pathlib import Path +import re +import subprocess + +_LOGGER = logging.getLogger(__name__) + +# Pattern to extract ESPHome component namespaces dynamically +ESPHOME_COMPONENT_PATTERN = re.compile(r"esphome::([a-zA-Z0-9_]+)::") + +# Component identification rules +# Symbol patterns: patterns found in raw symbol names +SYMBOL_PATTERNS = { + "freertos": [ + "vTask", + "xTask", + "xQueue", + "pvPort", + "vPort", + "uxTask", + "pcTask", + "prvTimerTask", + "prvAddNewTaskToReadyList", + "pxReadyTasksLists", + "prvAddCurrentTaskToDelayedList", + "xEventGroupWaitBits", + "xRingbufferSendFromISR", + "prvSendItemDoneNoSplit", + "prvReceiveGeneric", + "prvSendAcquireGeneric", + "prvCopyItemAllowSplit", + "xEventGroup", + "xRingbuffer", + "prvSend", + "prvReceive", + "prvCopy", + "xPort", + "ulTaskGenericNotifyTake", + "prvIdleTask", + "prvInitialiseNewTask", + "prvIsYieldRequiredSMP", + "prvGetItemByteBuf", + "prvInitializeNewRingbuffer", + "prvAcquireItemNoSplit", + "prvNotifyQueueSetContainer", + "ucStaticTimerQueueStorage", + "eTaskGetState", + "main_task", + "do_system_init_fn", + "xSemaphoreCreateGenericWithCaps", + "vListInsert", + "uxListRemove", + "vRingbufferReturnItem", + "vRingbufferReturnItemFromISR", + "prvCheckItemFitsByteBuffer", + "prvGetCurMaxSizeAllowSplit", + "tick_hook", + "sys_sem_new", + "sys_arch_mbox_fetch", + "sys_arch_sem_wait", + "prvDeleteTCB", + "vQueueDeleteWithCaps", + "vRingbufferDeleteWithCaps", + "vSemaphoreDeleteWithCaps", 
+ "prvCheckItemAvail", + "prvCheckTaskCanBeScheduledSMP", + "prvGetCurMaxSizeNoSplit", + "prvResetNextTaskUnblockTime", + "prvReturnItemByteBuf", + "vApplicationStackOverflowHook", + "vApplicationGetIdleTaskMemory", + "sys_init", + "sys_mbox_new", + "sys_arch_mbox_tryfetch", + ], + "xtensa": ["xt_", "_xt_", "xPortEnterCriticalTimeout"], + "heap": ["heap_", "multi_heap"], + "spi_flash": ["spi_flash"], + "rtc": ["rtc_", "rtcio_ll_"], + "gpio_driver": ["gpio_", "pins"], + "uart_driver": ["uart", "_uart", "UART"], + "timer": ["timer_", "esp_timer"], + "peripherals": ["periph_", "periman"], + "network_stack": [ + "vj_compress", + "raw_sendto", + "raw_input", + "etharp_", + "icmp_input", + "socket_ipv6", + "ip_napt", + "socket_ipv4_multicast", + "socket_ipv6_multicast", + "netconn_", + "recv_raw", + "accept_function", + "netconn_recv_data", + "netconn_accept", + "netconn_write_vectors_partly", + "netconn_drain", + "raw_connect", + "raw_bind", + "icmp_send_response", + "sockets", + "icmp_dest_unreach", + "inet_chksum_pseudo", + "alloc_socket", + "done_socket", + "set_global_fd_sets", + "inet_chksum_pbuf", + "tryget_socket_unconn_locked", + "tryget_socket_unconn", + "cs_create_ctrl_sock", + "netbuf_alloc", + ], + "ipv6_stack": ["nd6_", "ip6_", "mld6_", "icmp6_", "icmp6_input"], + "wifi_stack": [ + "ieee80211", + "hostap", + "sta_", + "ap_", + "scan_", + "wifi_", + "wpa_", + "wps_", + "esp_wifi", + "cnx_", + "wpa3_", + "sae_", + "wDev_", + "ic_", + "mac_", + "esf_buf", + "gWpaSm", + "sm_WPA", + "eapol_", + "owe_", + "wifiLowLevelInit", + "s_do_mapping", + "gScanStruct", + "ppSearchTxframe", + "ppMapWaitTxq", + "ppFillAMPDUBar", + "ppCheckTxConnTrafficIdle", + "ppCalTkipMic", + ], + "bluetooth": ["bt_", "ble_", "l2c_", "gatt_", "gap_", "hci_", "BT_init"], + "wifi_bt_coex": ["coex"], + "bluetooth_rom": ["r_ble", "r_lld", "r_llc", "r_llm"], + "bluedroid_bt": [ + "bluedroid", + "btc_", + "bta_", + "btm_", + "btu_", + "BTM_", + "GATT", + "L2CA_", + "smp_", + "gatts_", + "attp_", + "l2cu_", + "l2cb", + "smp_cb", + "BTA_GATTC_", + "SMP_", + "BTU_", + "BTA_Dm", + "GAP_Ble", + "BT_tx_if", + "host_recv_pkt_cb", + "saved_local_oob_data", + "string_to_bdaddr", + "string_is_bdaddr", + "CalConnectParamTimeout", + "transmit_fragment", + "transmit_data", + "event_command_ready", + "read_command_complete_header", + "parse_read_local_extended_features_response", + "parse_read_local_version_info_response", + "should_request_high", + "btdm_wakeup_request", + "BTA_SetAttributeValue", + "BTA_EnableBluetooth", + "transmit_command_futured", + "transmit_command", + "get_waiting_command", + "make_command", + "transmit_downward", + "host_recv_adv_packet", + "copy_extra_byte_in_db", + "parse_read_local_supported_commands_response", + ], + "crypto_math": [ + "ecp_", + "bignum_", + "mpi_", + "sswu", + "modp", + "dragonfly_", + "gcm_mult", + "__multiply", + "quorem", + "__mdiff", + "__lshift", + "__mprec_tens", + "ECC_", + "multiprecision_", + "mix_sub_columns", + "sbox", + "gfm2_sbox", + "gfm3_sbox", + "curve_p256", + "curve", + "p_256_init_curve", + "shift_sub_rows", + "rshift", + ], + "hw_crypto": ["esp_aes", "esp_sha", "esp_rsa", "esp_bignum", "esp_mpi"], + "libc": [ + "printf", + "scanf", + "malloc", + "free", + "memcpy", + "memset", + "strcpy", + "strlen", + "_dtoa", + "_fopen", + "__sfvwrite_r", + "qsort", + "__sf", + "__sflush_r", + "__srefill_r", + "_impure_data", + "_reclaim_reent", + "_open_r", + "strncpy", + "_strtod_l", + "__gethex", + "__hexnan", + "_setenv_r", + "_tzset_unlocked_r", + "__tzcalc_limits", + 
"select", + "scalbnf", + "strtof", + "strtof_l", + "__d2b", + "__b2d", + "__s2b", + "_Balloc", + "__multadd", + "__lo0bits", + "__atexit0", + "__smakebuf_r", + "__swhatbuf_r", + "_sungetc_r", + "_close_r", + "_link_r", + "_unsetenv_r", + "_rename_r", + "__month_lengths", + "tzinfo", + "__ratio", + "__hi0bits", + "__ulp", + "__any_on", + "__copybits", + "L_shift", + "_fcntl_r", + "_lseek_r", + "_read_r", + "_write_r", + "_unlink_r", + "_fstat_r", + "access", + "fsync", + "tcsetattr", + "tcgetattr", + "tcflush", + "tcdrain", + "__ssrefill_r", + "_stat_r", + "__hexdig_fun", + "__mcmp", + "_fwalk_sglue", + "__fpclassifyf", + "_setlocale_r", + "_mbrtowc_r", + "fcntl", + "__match", + "_lock_close", + "__c$", + "__func__$", + "__FUNCTION__$", + "DAYS_IN_MONTH", + "_DAYS_BEFORE_MONTH", + "CSWTCH$", + "dst$", + "sulp", + ], + "string_ops": ["strcmp", "strncmp", "strchr", "strstr", "strtok", "strdup"], + "memory_alloc": ["malloc", "calloc", "realloc", "free", "_sbrk"], + "file_io": [ + "fread", + "fwrite", + "fopen", + "fclose", + "fseek", + "ftell", + "fflush", + "s_fd_table", + ], + "string_formatting": [ + "snprintf", + "vsnprintf", + "sprintf", + "vsprintf", + "sscanf", + "vsscanf", + ], + "cpp_anonymous": ["_GLOBAL__N_", "n$"], + "cpp_runtime": ["__cxx", "_ZN", "_ZL", "_ZSt", "__gxx_personality", "_Z16"], + "exception_handling": ["__cxa_", "_Unwind_", "__gcc_personality", "uw_frame_state"], + "static_init": ["_GLOBAL__sub_I_"], + "mdns_lib": ["mdns"], + "phy_radio": [ + "phy_", + "rf_", + "chip_", + "register_chipv7", + "pbus_", + "bb_", + "fe_", + "rfcal_", + "ram_rfcal", + "tx_pwctrl", + "rx_chan", + "set_rx_gain", + "set_chan", + "agc_reg", + "ram_txiq", + "ram_txdc", + "ram_gen_rx_gain", + "rx_11b_opt", + "set_rx_sense", + "set_rx_gain_cal", + "set_chan_dig_gain", + "tx_pwctrl_init_cal", + "rfcal_txiq", + "set_tx_gain_table", + "correct_rfpll_offset", + "pll_correct_dcap", + "txiq_cal_init", + "pwdet_sar", + "pwdet_sar2_init", + "ram_iq_est_enable", + "ram_rfpll_set_freq", + "ant_wifirx_cfg", + "ant_btrx_cfg", + "force_txrxoff", + "force_txrx_off", + "tx_paon_set", + "opt_11b_resart", + "rfpll_1p2_opt", + "ram_dc_iq_est", + "ram_start_tx_tone", + "ram_en_pwdet", + "ram_cbw2040_cfg", + "rxdc_est_min", + "i2cmst_reg_init", + "temprature_sens_read", + "ram_restart_cal", + "ram_write_gain_mem", + "ram_wait_rfpll_cal_end", + "txcal_debuge_mode", + "ant_wifitx_cfg", + "reg_init_begin", + ], + "wifi_phy_pp": ["pp_", "ppT", "ppR", "ppP", "ppInstall", "ppCalTxAMPDULength"], + "wifi_lmac": ["lmac"], + "wifi_device": ["wdev", "wDev_"], + "power_mgmt": [ + "pm_", + "sleep", + "rtc_sleep", + "light_sleep", + "deep_sleep", + "power_down", + "g_pm", + ], + "memory_mgmt": [ + "mem_", + "memory_", + "tlsf_", + "memp_", + "pbuf_", + "pbuf_alloc", + "pbuf_copy_partial_pbuf", + ], + "hal_layer": ["hal_"], + "clock_mgmt": [ + "clk_", + "clock_", + "rtc_clk", + "apb_", + "cpu_freq", + "setCpuFrequencyMhz", + ], + "cache_mgmt": ["cache"], + "flash_ops": ["flash", "image_load"], + "interrupt_handlers": [ + "isr", + "interrupt", + "intr_", + "exc_", + "exception", + "port_IntStack", + ], + "wrapper_functions": ["_wrapper"], + "error_handling": ["panic", "abort", "assert", "error_", "fault"], + "authentication": ["auth"], + "ppp_protocol": ["ppp", "ipcp_", "lcp_", "chap_", "LcpEchoCheck"], + "dhcp": ["dhcp", "handle_dhcp"], + "ethernet_phy": [ + "emac_", + "eth_phy_", + "phy_tlk110", + "phy_lan87", + "phy_ip101", + "phy_rtl", + "phy_dp83", + "phy_ksz", + "lan87xx_", + "rtl8201_", + "ip101_", + "ksz80xx_", + 
"jl1101_", + "dp83848_", + "eth_on_state_changed", + ], + "threading": ["pthread_", "thread_", "_task_"], + "pthread": ["pthread"], + "synchronization": ["mutex", "semaphore", "spinlock", "portMUX"], + "math_lib": [ + "sin", + "cos", + "tan", + "sqrt", + "pow", + "exp", + "log", + "atan", + "asin", + "acos", + "floor", + "ceil", + "fabs", + "round", + ], + "random": ["rand", "random", "rng_", "prng"], + "time_lib": [ + "time", + "clock", + "gettimeofday", + "settimeofday", + "localtime", + "gmtime", + "mktime", + "strftime", + ], + "console_io": ["console_", "uart_tx", "uart_rx", "puts", "putchar", "getchar"], + "rom_functions": ["r_", "rom_"], + "compiler_runtime": [ + "__divdi3", + "__udivdi3", + "__moddi3", + "__muldi3", + "__ashldi3", + "__ashrdi3", + "__lshrdi3", + "__cmpdi2", + "__fixdfdi", + "__floatdidf", + ], + "libgcc": ["libgcc", "_divdi3", "_udivdi3"], + "boot_startup": ["boot", "start_cpu", "call_start", "startup", "bootloader"], + "bootloader": ["bootloader_", "esp_bootloader"], + "app_framework": ["app_", "initArduino", "setup", "loop", "Update"], + "weak_symbols": ["__weak_"], + "compiler_builtins": ["__builtin_"], + "vfs": ["vfs_", "VFS"], + "esp32_sdk": ["esp32_", "esp32c", "esp32s"], + "usb": ["usb_", "USB", "cdc_", "CDC"], + "i2c_driver": ["i2c_", "I2C"], + "i2s_driver": ["i2s_", "I2S"], + "spi_driver": ["spi_", "SPI"], + "adc_driver": ["adc_", "ADC"], + "dac_driver": ["dac_", "DAC"], + "touch_driver": ["touch_", "TOUCH"], + "pwm_driver": ["pwm_", "PWM", "ledc_", "LEDC"], + "rmt_driver": ["rmt_", "RMT"], + "pcnt_driver": ["pcnt_", "PCNT"], + "can_driver": ["can_", "CAN", "twai_", "TWAI"], + "sdmmc_driver": ["sdmmc_", "SDMMC", "sdcard", "sd_card"], + "temp_sensor": ["temp_sensor", "tsens_"], + "watchdog": ["wdt_", "WDT", "watchdog"], + "brownout": ["brownout", "bod_"], + "ulp": ["ulp_", "ULP"], + "psram": ["psram", "PSRAM", "spiram", "SPIRAM"], + "efuse": ["efuse", "EFUSE"], + "partition": ["partition", "esp_partition"], + "esp_event": ["esp_event", "event_loop", "event_callback"], + "esp_console": ["esp_console", "console_"], + "chip_specific": ["chip_", "esp_chip"], + "esp_system_utils": ["esp_system", "esp_hw", "esp_clk", "esp_sleep"], + "ipc": ["esp_ipc", "ipc_"], + "wifi_config": [ + "g_cnxMgr", + "gChmCxt", + "g_ic", + "TxRxCxt", + "s_dp", + "s_ni", + "s_reg_dump", + "packet$", + "d_mult_table", + "K", + "fcstab", + ], + "smartconfig": ["sc_ack_send"], + "rc_calibration": ["rc_cal", "rcUpdate"], + "noise_floor": ["noise_check"], + "rf_calibration": [ + "set_rx_sense", + "set_rx_gain_cal", + "set_chan_dig_gain", + "tx_pwctrl_init_cal", + "rfcal_txiq", + "set_tx_gain_table", + "correct_rfpll_offset", + "pll_correct_dcap", + "txiq_cal_init", + "pwdet_sar", + "rx_11b_opt", + ], + "wifi_crypto": [ + "pk_use_ecparams", + "process_segments", + "ccmp_", + "rc4_", + "aria_", + "mgf_mask", + "dh_group", + "ccmp_aad_nonce", + "ccmp_encrypt", + "rc4_skip", + "aria_sb1", + "aria_sb2", + "aria_is1", + "aria_is2", + "aria_sl", + "aria_a", + ], + "radio_control": ["fsm_input", "fsm_sconfreq"], + "pbuf": [ + "pbuf_", + ], + "event_group": ["xEventGroup"], + "ringbuffer": ["xRingbuffer", "prvSend", "prvReceive", "prvCopy"], + "provisioning": ["prov_", "prov_stop_and_notify"], + "scan": ["gScanStruct"], + "port": ["xPort"], + "elf_loader": [ + "elf_add", + "elf_add_note", + "elf_add_segment", + "process_image", + "read_encoded", + "read_encoded_value", + "read_encoded_value_with_base", + "process_image_header", + ], + "socket_api": [ + "sockets", + "netconn_", + "accept_function", + 
"recv_raw", + "socket_ipv4_multicast", + "socket_ipv6_multicast", + ], + "igmp": ["igmp_", "igmp_send", "igmp_input"], + "icmp6": ["icmp6_"], + "arp": ["arp_table"], + "ampdu": [ + "ampdu_", + "rcAmpdu", + "trc_onAmpduOp", + "rcAmpduLowerRate", + "ampdu_dispatch_upto", + ], + "ieee802_11": ["ieee802_11_", "ieee802_11_parse_elems"], + "rate_control": ["rssi_margin", "rcGetSched", "get_rate_fcc_index"], + "nan": ["nan_dp_", "nan_dp_post_tx", "nan_dp_delete_peer"], + "channel_mgmt": ["chm_init", "chm_set_current_channel"], + "trace": ["trc_init", "trc_onAmpduOp"], + "country_code": ["country_info", "country_info_24ghz"], + "multicore": ["do_multicore_settings"], + "Update_lib": ["Update"], + "stdio": [ + "__sf", + "__sflush_r", + "__srefill_r", + "_impure_data", + "_reclaim_reent", + "_open_r", + ], + "strncpy_ops": ["strncpy"], + "math_internal": ["__mdiff", "__lshift", "__mprec_tens", "quorem"], + "character_class": ["__chclass"], + "camellia": ["camellia_", "camellia_feistel"], + "crypto_tables": ["FSb", "FSb2", "FSb3", "FSb4"], + "event_buffer": ["g_eb_list_desc", "eb_space"], + "base_node": ["base_node_", "base_node_add_handler"], + "file_descriptor": ["s_fd_table"], + "tx_delay": ["tx_delay_cfg"], + "deinit": ["deinit_functions"], + "lcp_echo": ["LcpEchoCheck"], + "raw_api": ["raw_bind", "raw_connect"], + "checksum": ["process_checksum"], + "entry_management": ["add_entry"], + "esp_ota": ["esp_ota", "ota_", "read_otadata"], + "http_server": [ + "httpd_", + "parse_url_char", + "cb_headers_complete", + "delete_entry", + "validate_structure", + "config_save", + "config_new", + "verify_url", + "cb_url", + ], + "misc_system": [ + "alarm_cbs", + "start_up", + "tokens", + "unhex", + "osi_funcs_ro", + "enum_function", + "fragment_and_dispatch", + "alarm_set", + "osi_alarm_new", + "config_set_string", + "config_update_newest_section", + "config_remove_key", + "method_strings", + "interop_match", + "interop_database", + "__state_table", + "__action_table", + "s_stub_table", + "s_context", + "s_mmu_ctx", + "s_get_bus_mask", + "hli_queue_put", + "list_remove", + "list_delete", + "lock_acquire_generic", + "is_vect_desc_usable", + "io_mode_str", + "__c$20233", + "interface", + "read_id_core", + "subscribe_idle", + "unsubscribe_idle", + "s_clkout_handle", + "lock_release_generic", + "config_set_int", + "config_get_int", + "config_get_string", + "config_has_key", + "config_remove_section", + "osi_alarm_init", + "osi_alarm_deinit", + "fixed_queue_enqueue", + "fixed_queue_dequeue", + "fixed_queue_new", + "fixed_pkt_queue_enqueue", + "fixed_pkt_queue_new", + "list_append", + "list_prepend", + "list_insert_after", + "list_contains", + "list_get_node", + "hash_function_blob", + "cb_no_body", + "cb_on_body", + "profile_tab", + "get_arg", + "trim", + "buf$", + "process_appended_hash_and_sig$constprop$0", + "uuidType", + "allocate_svc_db_buf", + "_hostname_is_ours", + "s_hli_handlers", + "tick_cb", + "idle_cb", + "input", + "entry_find", + "section_find", + "find_bucket_entry_", + "config_has_section", + "hli_queue_create", + "hli_queue_get", + "hli_c_handler", + "future_ready", + "future_await", + "future_new", + "pkt_queue_enqueue", + "pkt_queue_dequeue", + "pkt_queue_cleanup", + "pkt_queue_create", + "pkt_queue_destroy", + "fixed_pkt_queue_dequeue", + "osi_alarm_cancel", + "osi_alarm_is_active", + "osi_sem_take", + "osi_event_create", + "osi_event_bind", + "alarm_cb_handler", + "list_foreach", + "list_back", + "list_front", + "list_clear", + "fixed_queue_try_peek_first", + "translate_path", + "get_idx", + 
"find_key", + "init", + "end", + "start", + "set_read_value", + "copy_address_list", + "copy_and_key", + "sdk_cfg_opts", + "leftshift_onebit", + "config_section_end", + "config_section_begin", + "find_entry_and_check_all_reset", + "image_validate", + "xPendingReadyList", + "vListInitialise", + "lock_init_generic", + "ant_bttx_cfg", + "ant_dft_cfg", + "cs_send_to_ctrl_sock", + "config_llc_util_funcs_reset", + "make_set_adv_report_flow_control", + "make_set_event_mask", + "raw_new", + "raw_remove", + "BTE_InitStack", + "parse_read_local_supported_features_response", + "__math_invalidf", + "tinytens", + "__mprec_tinytens", + "__mprec_bigtens", + "vRingbufferDelete", + "vRingbufferDeleteWithCaps", + "vRingbufferReturnItem", + "vRingbufferReturnItemFromISR", + "get_acl_data_size_ble", + "get_features_ble", + "get_features_classic", + "get_acl_packet_size_ble", + "get_acl_packet_size_classic", + "supports_extended_inquiry_response", + "supports_rssi_with_inquiry_results", + "supports_interlaced_inquiry_scan", + "supports_reading_remote_extended_features", + ], + "bluetooth_ll": [ + "lld_pdu_", + "ld_acl_", + "lld_stop_ind_handler", + "lld_evt_winsize_change", + "config_lld_evt_funcs_reset", + "config_lld_funcs_reset", + "config_llm_funcs_reset", + "llm_set_long_adv_data", + "lld_retry_tx_prog", + "llc_link_sup_to_ind_handler", + "config_llc_funcs_reset", + "lld_evt_rxwin_compute", + "config_btdm_funcs_reset", + "config_ea_funcs_reset", + "llc_defalut_state_tab_reset", + "config_rwip_funcs_reset", + "ke_lmp_rx_flooding_detect", + ], +} + +# Demangled patterns: patterns found in demangled C++ names +DEMANGLED_PATTERNS = { + "gpio_driver": ["GPIO"], + "uart_driver": ["UART"], + "network_stack": [ + "lwip", + "tcp", + "udp", + "ip4", + "ip6", + "dhcp", + "dns", + "netif", + "ethernet", + "ppp", + "slip", + ], + "wifi_stack": ["NetworkInterface"], + "nimble_bt": [ + "nimble", + "NimBLE", + "ble_hs", + "ble_gap", + "ble_gatt", + "ble_att", + "ble_l2cap", + "ble_sm", + ], + "crypto": ["mbedtls", "crypto", "sha", "aes", "rsa", "ecc", "tls", "ssl"], + "cpp_stdlib": ["std::", "__gnu_cxx::", "__cxxabiv"], + "static_init": ["__static_initialization"], + "rtti": ["__type_info", "__class_type_info"], + "web_server_lib": ["AsyncWebServer", "AsyncWebHandler", "WebServer"], + "async_tcp": ["AsyncClient", "AsyncServer"], + "mdns_lib": ["mdns"], + "json_lib": [ + "ArduinoJson", + "JsonDocument", + "JsonArray", + "JsonObject", + "deserialize", + "serialize", + ], + "http_lib": ["HTTP", "http_", "Request", "Response", "Uri", "WebSocket"], + "logging": ["log", "Log", "print", "Print", "diag_"], + "authentication": ["checkDigestAuthentication"], + "libgcc": ["libgcc"], + "esp_system": ["esp_", "ESP"], + "arduino": ["arduino"], + "nvs": ["nvs_", "_ZTVN3nvs", "nvs::"], + "filesystem": ["spiffs", "vfs"], + "libc": ["newlib"], +} + + +# Get the list of actual ESPHome components by scanning the components directory +def get_esphome_components(): + """Get set of actual ESPHome components from the components directory.""" + components = set() + + # Find the components directory relative to this file + current_dir = Path(__file__).parent + components_dir = current_dir / "components" + + if components_dir.exists() and components_dir.is_dir(): + for item in components_dir.iterdir(): + if ( + item.is_dir() + and not item.name.startswith(".") + and not item.name.startswith("__") + ): + components.add(item.name) + + return components + + +# Cache the component list +ESPHOME_COMPONENTS = get_esphome_components() + + +class 
MemorySection: + """Represents a memory section with its symbols.""" + + def __init__(self, name: str): + self.name = name + self.symbols: list[tuple[str, int, str]] = [] # (symbol_name, size, component) + self.total_size = 0 + + +class ComponentMemory: + """Tracks memory usage for a component.""" + + def __init__(self, name: str): + self.name = name + self.text_size = 0 # Code in flash + self.rodata_size = 0 # Read-only data in flash + self.data_size = 0 # Initialized data (flash + ram) + self.bss_size = 0 # Uninitialized data (ram only) + self.symbol_count = 0 + + @property + def flash_total(self) -> int: + return self.text_size + self.rodata_size + self.data_size + + @property + def ram_total(self) -> int: + return self.data_size + self.bss_size + + +class MemoryAnalyzer: + """Analyzes memory usage from ELF files.""" + + def __init__( + self, + elf_path: str, + objdump_path: str | None = None, + readelf_path: str | None = None, + external_components: set[str] | None = None, + ): + self.elf_path = Path(elf_path) + if not self.elf_path.exists(): + raise FileNotFoundError(f"ELF file not found: {elf_path}") + + self.objdump_path = objdump_path or "objdump" + self.readelf_path = readelf_path or "readelf" + self.external_components = external_components or set() + + self.sections: dict[str, MemorySection] = {} + self.components: dict[str, ComponentMemory] = defaultdict( + lambda: ComponentMemory("") + ) + self._demangle_cache: dict[str, str] = {} + self._uncategorized_symbols: list[tuple[str, str, int]] = [] + self._esphome_core_symbols: list[ + tuple[str, str, int] + ] = [] # Track core symbols + self._component_symbols: dict[str, list[tuple[str, str, int]]] = defaultdict( + list + ) # Track symbols for all components + + def analyze(self) -> dict[str, ComponentMemory]: + """Analyze the ELF file and return component memory usage.""" + self._parse_sections() + self._parse_symbols() + self._categorize_symbols() + return dict(self.components) + + def _parse_sections(self) -> None: + """Parse section headers from ELF file.""" + try: + result = subprocess.run( + [self.readelf_path, "-S", str(self.elf_path)], + capture_output=True, + text=True, + check=True, + ) + + # Parse section headers + for line in result.stdout.splitlines(): + # Look for section entries + match = re.match( + r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)", + line, + ) + if match: + section_name = match.group(1) + size_hex = match.group(2) + size = int(size_hex, 16) + + # Map various section names to standard categories + mapped_section = None + if ".text" in section_name or ".iram" in section_name: + mapped_section = ".text" + elif ".rodata" in section_name: + mapped_section = ".rodata" + elif ".data" in section_name and "bss" not in section_name: + mapped_section = ".data" + elif ".bss" in section_name: + mapped_section = ".bss" + + if mapped_section: + if mapped_section not in self.sections: + self.sections[mapped_section] = MemorySection( + mapped_section + ) + self.sections[mapped_section].total_size += size + + except subprocess.CalledProcessError as e: + _LOGGER.error(f"Failed to parse sections: {e}") + raise + + def _parse_symbols(self) -> None: + """Parse symbols from ELF file.""" + # Section mapping - centralizes the logic + SECTION_MAPPING = { + ".text": [".text", ".iram"], + ".rodata": [".rodata"], + ".data": [".data", ".dram"], + ".bss": [".bss"], + } + + def map_section_name(raw_section: str) -> str | None: + """Map raw section name to standard section.""" + for standard_section, 
patterns in SECTION_MAPPING.items(): + if any(pattern in raw_section for pattern in patterns): + return standard_section + return None + + def parse_symbol_line(line: str) -> tuple[str, str, int, str] | None: + """Parse a single symbol line from objdump output. + + Returns (section, name, size, address) or None if not a valid symbol. + Format: address l/g w/d F/O section size name + Example: 40084870 l F .iram0.text 00000000 _xt_user_exc + """ + parts = line.split() + if len(parts) < 5: + return None + + try: + # Validate and extract address + address = parts[0] + int(address, 16) + except ValueError: + return None + + # Look for F (function) or O (object) flag + if "F" not in parts and "O" not in parts: + return None + + # Find section, size, and name + for i, part in enumerate(parts): + if part.startswith("."): + section = map_section_name(part) + if section and i + 1 < len(parts): + try: + size = int(parts[i + 1], 16) + if i + 2 < len(parts) and size > 0: + name = " ".join(parts[i + 2 :]) + return (section, name, size, address) + except ValueError: + pass + break + return None + + try: + result = subprocess.run( + [self.objdump_path, "-t", str(self.elf_path)], + capture_output=True, + text=True, + check=True, + ) + + # Track seen addresses to avoid duplicates + seen_addresses: set[str] = set() + + for line in result.stdout.splitlines(): + symbol_info = parse_symbol_line(line) + if symbol_info: + section, name, size, address = symbol_info + # Skip duplicate symbols at the same address (e.g., C1/C2 constructors) + if address not in seen_addresses and section in self.sections: + self.sections[section].symbols.append((name, size, "")) + seen_addresses.add(address) + + except subprocess.CalledProcessError as e: + _LOGGER.error(f"Failed to parse symbols: {e}") + raise + + def _categorize_symbols(self) -> None: + """Categorize symbols by component.""" + # First, collect all unique symbol names for batch demangling + all_symbols = set() + for section in self.sections.values(): + for symbol_name, _, _ in section.symbols: + all_symbols.add(symbol_name) + + # Batch demangle all symbols at once + self._batch_demangle_symbols(list(all_symbols)) + + # Now categorize with cached demangled names + for section_name, section in self.sections.items(): + for symbol_name, size, _ in section.symbols: + component = self._identify_component(symbol_name) + + if component not in self.components: + self.components[component] = ComponentMemory(component) + + comp_mem = self.components[component] + comp_mem.symbol_count += 1 + + if section_name == ".text": + comp_mem.text_size += size + elif section_name == ".rodata": + comp_mem.rodata_size += size + elif section_name == ".data": + comp_mem.data_size += size + elif section_name == ".bss": + comp_mem.bss_size += size + + # Track uncategorized symbols + if component == "other" and size > 0: + demangled = self._demangle_symbol(symbol_name) + self._uncategorized_symbols.append((symbol_name, demangled, size)) + + # Track ESPHome core symbols for detailed analysis + if component == "[esphome]core" and size > 0: + demangled = self._demangle_symbol(symbol_name) + self._esphome_core_symbols.append((symbol_name, demangled, size)) + + # Track all component symbols for detailed analysis + if size > 0: + demangled = self._demangle_symbol(symbol_name) + self._component_symbols[component].append( + (symbol_name, demangled, size) + ) + + def _identify_component(self, symbol_name: str) -> str: + """Identify which component a symbol belongs to.""" + # Demangle C++ names if needed + 
demangled = self._demangle_symbol(symbol_name) + + # Check for special component classes first (before namespace pattern) + # This handles cases like esphome::ESPHomeOTAComponent which should map to ota + if "esphome::" in demangled: + # Check for special component classes that include component name in the class + # For example: esphome::ESPHomeOTAComponent -> ota component + for component_name in ESPHOME_COMPONENTS: + # Check various naming patterns + component_upper = component_name.upper() + component_camel = component_name.replace("_", "").title() + patterns = [ + f"esphome::{component_upper}Component", # e.g., esphome::OTAComponent + f"esphome::ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent + f"esphome::{component_camel}Component", # e.g., esphome::OtaComponent + f"esphome::ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent + ] + + if any(pattern in demangled for pattern in patterns): + return f"[esphome]{component_name}" + + # Check for ESPHome component namespaces + match = ESPHOME_COMPONENT_PATTERN.search(demangled) + if match: + component_name = match.group(1) + # Strip trailing underscore if present (e.g., switch_ -> switch) + component_name = component_name.rstrip("_") + + # Check if this is an actual component in the components directory + if component_name in ESPHOME_COMPONENTS: + return f"[esphome]{component_name}" + # Check if this is a known external component from the config + if component_name in self.external_components: + return f"[external]{component_name}" + # Everything else in esphome:: namespace is core + return "[esphome]core" + + # Check for esphome core namespace (no component namespace) + if "esphome::" in demangled: + # If no component match found, it's core + return "[esphome]core" + + # Check against symbol patterns + for component, patterns in SYMBOL_PATTERNS.items(): + if any(pattern in symbol_name for pattern in patterns): + return component + + # Check against demangled patterns + for component, patterns in DEMANGLED_PATTERNS.items(): + if any(pattern in demangled for pattern in patterns): + return component + + # Special cases that need more complex logic + + # Check if spi_flash vs spi_driver + if "spi_" in symbol_name or "SPI" in symbol_name: + if "spi_flash" in symbol_name: + return "spi_flash" + return "spi_driver" + + # libc special printf variants + if symbol_name.startswith("_") and symbol_name[1:].replace("_r", "").replace( + "v", "" + ).replace("s", "") in ["printf", "fprintf", "sprintf", "scanf"]: + return "libc" + + # Track uncategorized symbols for analysis + return "other" + + def _batch_demangle_symbols(self, symbols: list[str]) -> None: + """Batch demangle C++ symbol names for efficiency.""" + if not symbols: + return + + # Try to find the appropriate c++filt for the platform + cppfilt_cmd = "c++filt" + + # Check if we have a toolchain-specific c++filt + if self.objdump_path and self.objdump_path != "objdump": + # Replace objdump with c++filt in the path + potential_cppfilt = self.objdump_path.replace("objdump", "c++filt") + if Path(potential_cppfilt).exists(): + cppfilt_cmd = potential_cppfilt + + try: + # Send all symbols to c++filt at once + result = subprocess.run( + [cppfilt_cmd], + input="\n".join(symbols), + capture_output=True, + text=True, + check=False, + ) + if result.returncode == 0: + demangled_lines = result.stdout.strip().split("\n") + # Map original to demangled names + for original, demangled in zip(symbols, demangled_lines): + self._demangle_cache[original] = demangled + 
else: + # If batch fails, cache originals + for symbol in symbols: + self._demangle_cache[symbol] = symbol + except Exception: + # On error, cache originals + for symbol in symbols: + self._demangle_cache[symbol] = symbol + + def _demangle_symbol(self, symbol: str) -> str: + """Get demangled C++ symbol name from cache.""" + return self._demangle_cache.get(symbol, symbol) + + def _categorize_esphome_core_symbol(self, demangled: str) -> str: + """Categorize ESPHome core symbols into subcategories.""" + # Dictionary of patterns for core subcategories + CORE_SUBCATEGORY_PATTERNS = { + "Component Framework": ["Component"], + "Application Core": ["Application"], + "Scheduler": ["Scheduler"], + "Logging": ["Logger", "log_"], + "Preferences": ["preferences", "Preferences"], + "Synchronization": ["Mutex", "Lock"], + "Helpers": ["Helper"], + "Network Utilities": ["network", "Network"], + "Time Management": ["time", "Time"], + "String Utilities": ["str_", "string"], + "Parsing/Formatting": ["parse_", "format_"], + "Optional Types": ["optional", "Optional"], + "Callbacks": ["Callback", "callback"], + "Color Utilities": ["Color"], + "C++ Operators": ["operator"], + "Global Variables": ["global_", "_GLOBAL"], + "Setup/Loop": ["setup", "loop"], + "System Control": ["reboot", "restart"], + "GPIO Management": ["GPIO", "gpio"], + "Interrupt Handling": ["ISR", "interrupt"], + "Hooks": ["Hook", "hook"], + "Entity Base Classes": ["Entity"], + "Automation Framework": ["automation", "Automation"], + "Automation Components": ["Condition", "Action", "Trigger"], + "Lambda Support": ["lambda"], + } + + # Special patterns that need to be checked separately + if any(pattern in demangled for pattern in ["vtable", "typeinfo", "thunk"]): + return "C++ Runtime (vtables/RTTI)" + + if demangled.startswith("std::"): + return "C++ STL" + + # Check against patterns + for category, patterns in CORE_SUBCATEGORY_PATTERNS.items(): + if any(pattern in demangled for pattern in patterns): + return category + + return "Other Core" + + def generate_report(self, detailed: bool = False) -> str: + """Generate a formatted memory report.""" + components = sorted( + self.components.items(), key=lambda x: x[1].flash_total, reverse=True + ) + + # Calculate totals + total_flash = sum(c.flash_total for _, c in components) + total_ram = sum(c.ram_total for _, c in components) + + # Build report + lines = [] + + # Column width constants + COL_COMPONENT = 29 + COL_FLASH_TEXT = 14 + COL_FLASH_DATA = 14 + COL_RAM_DATA = 12 + COL_RAM_BSS = 12 + COL_TOTAL_FLASH = 15 + COL_TOTAL_RAM = 12 + COL_SEPARATOR = 3 # " | " + + # Core analysis column widths + COL_CORE_SUBCATEGORY = 30 + COL_CORE_SIZE = 12 + COL_CORE_COUNT = 6 + COL_CORE_PERCENT = 10 + + # Calculate the exact table width + table_width = ( + COL_COMPONENT + + COL_SEPARATOR + + COL_FLASH_TEXT + + COL_SEPARATOR + + COL_FLASH_DATA + + COL_SEPARATOR + + COL_RAM_DATA + + COL_SEPARATOR + + COL_RAM_BSS + + COL_SEPARATOR + + COL_TOTAL_FLASH + + COL_SEPARATOR + + COL_TOTAL_RAM + ) + + lines.append("=" * table_width) + lines.append("Component Memory Analysis".center(table_width)) + lines.append("=" * table_width) + lines.append("") + + # Main table - fixed column widths + lines.append( + f"{'Component':<{COL_COMPONENT}} | {'Flash (text)':>{COL_FLASH_TEXT}} | {'Flash (data)':>{COL_FLASH_DATA}} | {'RAM (data)':>{COL_RAM_DATA}} | {'RAM (bss)':>{COL_RAM_BSS}} | {'Total Flash':>{COL_TOTAL_FLASH}} | {'Total RAM':>{COL_TOTAL_RAM}}" + ) + lines.append( + "-" * COL_COMPONENT + + "-+-" + + "-" * COL_FLASH_TEXT + + 
"-+-" + + "-" * COL_FLASH_DATA + + "-+-" + + "-" * COL_RAM_DATA + + "-+-" + + "-" * COL_RAM_BSS + + "-+-" + + "-" * COL_TOTAL_FLASH + + "-+-" + + "-" * COL_TOTAL_RAM + ) + + for name, mem in components: + if mem.flash_total > 0 or mem.ram_total > 0: + flash_rodata = mem.rodata_size + mem.data_size + lines.append( + f"{name:<{COL_COMPONENT}} | {mem.text_size:>{COL_FLASH_TEXT - 2},} B | {flash_rodata:>{COL_FLASH_DATA - 2},} B | " + f"{mem.data_size:>{COL_RAM_DATA - 2},} B | {mem.bss_size:>{COL_RAM_BSS - 2},} B | " + f"{mem.flash_total:>{COL_TOTAL_FLASH - 2},} B | {mem.ram_total:>{COL_TOTAL_RAM - 2},} B" + ) + + lines.append( + "-" * COL_COMPONENT + + "-+-" + + "-" * COL_FLASH_TEXT + + "-+-" + + "-" * COL_FLASH_DATA + + "-+-" + + "-" * COL_RAM_DATA + + "-+-" + + "-" * COL_RAM_BSS + + "-+-" + + "-" * COL_TOTAL_FLASH + + "-+-" + + "-" * COL_TOTAL_RAM + ) + lines.append( + f"{'TOTAL':<{COL_COMPONENT}} | {' ':>{COL_FLASH_TEXT}} | {' ':>{COL_FLASH_DATA}} | " + f"{' ':>{COL_RAM_DATA}} | {' ':>{COL_RAM_BSS}} | " + f"{total_flash:>{COL_TOTAL_FLASH - 2},} B | {total_ram:>{COL_TOTAL_RAM - 2},} B" + ) + + # Top consumers + lines.append("") + lines.append("Top Flash Consumers:") + for i, (name, mem) in enumerate(components[:25]): + if mem.flash_total > 0: + percentage = ( + (mem.flash_total / total_flash * 100) if total_flash > 0 else 0 + ) + lines.append( + f"{i + 1}. {name} ({mem.flash_total:,} B) - {percentage:.1f}% of analyzed flash" + ) + + lines.append("") + lines.append("Top RAM Consumers:") + ram_components = sorted(components, key=lambda x: x[1].ram_total, reverse=True) + for i, (name, mem) in enumerate(ram_components[:25]): + if mem.ram_total > 0: + percentage = (mem.ram_total / total_ram * 100) if total_ram > 0 else 0 + lines.append( + f"{i + 1}. {name} ({mem.ram_total:,} B) - {percentage:.1f}% of analyzed RAM" + ) + + lines.append("") + lines.append( + "Note: This analysis covers symbols in the ELF file. Some runtime allocations may not be included." 
+ ) + lines.append("=" * table_width) + + # Add ESPHome core detailed analysis if there are core symbols + if self._esphome_core_symbols: + lines.append("") + lines.append("=" * table_width) + lines.append("[esphome]core Detailed Analysis".center(table_width)) + lines.append("=" * table_width) + lines.append("") + + # Group core symbols by subcategory + core_subcategories: dict[str, list[tuple[str, str, int]]] = defaultdict( + list + ) + + for symbol, demangled, size in self._esphome_core_symbols: + # Categorize based on demangled name patterns + subcategory = self._categorize_esphome_core_symbol(demangled) + core_subcategories[subcategory].append((symbol, demangled, size)) + + # Sort subcategories by total size + sorted_subcategories = sorted( + [ + (name, symbols, sum(s[2] for s in symbols)) + for name, symbols in core_subcategories.items() + ], + key=lambda x: x[2], + reverse=True, + ) + + lines.append( + f"{'Subcategory':<{COL_CORE_SUBCATEGORY}} | {'Size':>{COL_CORE_SIZE}} | " + f"{'Count':>{COL_CORE_COUNT}} | {'% of Core':>{COL_CORE_PERCENT}}" + ) + lines.append( + "-" * COL_CORE_SUBCATEGORY + + "-+-" + + "-" * COL_CORE_SIZE + + "-+-" + + "-" * COL_CORE_COUNT + + "-+-" + + "-" * COL_CORE_PERCENT + ) + + core_total = sum(size for _, _, size in self._esphome_core_symbols) + + for subcategory, symbols, total_size in sorted_subcategories: + percentage = (total_size / core_total * 100) if core_total > 0 else 0 + lines.append( + f"{subcategory:<{COL_CORE_SUBCATEGORY}} | {total_size:>{COL_CORE_SIZE - 2},} B | " + f"{len(symbols):>{COL_CORE_COUNT}} | {percentage:>{COL_CORE_PERCENT - 1}.1f}%" + ) + + # Top 10 largest core symbols + lines.append("") + lines.append("Top 10 Largest [esphome]core Symbols:") + sorted_core_symbols = sorted( + self._esphome_core_symbols, key=lambda x: x[2], reverse=True + ) + + for i, (symbol, demangled, size) in enumerate(sorted_core_symbols[:15]): + lines.append(f"{i + 1}. 
{demangled} ({size:,} B)") + + lines.append("=" * table_width) + + # Add detailed analysis for top ESPHome and external components + esphome_components = [ + (name, mem) + for name, mem in components + if name.startswith("[esphome]") and name != "[esphome]core" + ] + external_components = [ + (name, mem) for name, mem in components if name.startswith("[external]") + ] + + top_esphome_components = sorted( + esphome_components, key=lambda x: x[1].flash_total, reverse=True + )[:30] + + # Include all external components (they're usually important) + top_external_components = sorted( + external_components, key=lambda x: x[1].flash_total, reverse=True + ) + + # Check if API component exists and ensure it's included + api_component = None + for name, mem in components: + if name == "[esphome]api": + api_component = (name, mem) + break + + # Combine all components to analyze: top ESPHome + all external + API if not already included + components_to_analyze = list(top_esphome_components) + list( + top_external_components + ) + if api_component and api_component not in components_to_analyze: + components_to_analyze.append(api_component) + + if components_to_analyze: + for comp_name, comp_mem in components_to_analyze: + comp_symbols = self._component_symbols.get(comp_name, []) + if comp_symbols: + lines.append("") + lines.append("=" * table_width) + lines.append(f"{comp_name} Detailed Analysis".center(table_width)) + lines.append("=" * table_width) + lines.append("") + + # Sort symbols by size + sorted_symbols = sorted( + comp_symbols, key=lambda x: x[2], reverse=True + ) + + lines.append(f"Total symbols: {len(sorted_symbols)}") + lines.append(f"Total size: {comp_mem.flash_total:,} B") + lines.append("") + + # Show all symbols > 100 bytes for better visibility + large_symbols = [ + (sym, dem, size) + for sym, dem, size in sorted_symbols + if size > 100 + ] + + lines.append( + f"{comp_name} Symbols > 100 B ({len(large_symbols)} symbols):" + ) + for i, (symbol, demangled, size) in enumerate(large_symbols): + lines.append(f"{i + 1}. 
{demangled} ({size:,} B)") + + lines.append("=" * table_width) + + return "\n".join(lines) + + def to_json(self) -> str: + """Export analysis results as JSON.""" + data = { + "components": { + name: { + "text": mem.text_size, + "rodata": mem.rodata_size, + "data": mem.data_size, + "bss": mem.bss_size, + "flash_total": mem.flash_total, + "ram_total": mem.ram_total, + "symbol_count": mem.symbol_count, + } + for name, mem in self.components.items() + }, + "totals": { + "flash": sum(c.flash_total for c in self.components.values()), + "ram": sum(c.ram_total for c in self.components.values()), + }, + } + return json.dumps(data, indent=2) + + def dump_uncategorized_symbols(self, output_file: str | None = None) -> None: + """Dump uncategorized symbols for analysis.""" + # Sort by size descending + sorted_symbols = sorted( + self._uncategorized_symbols, key=lambda x: x[2], reverse=True + ) + + lines = ["Uncategorized Symbols Analysis", "=" * 80] + lines.append(f"Total uncategorized symbols: {len(sorted_symbols)}") + lines.append( + f"Total uncategorized size: {sum(s[2] for s in sorted_symbols):,} bytes" + ) + lines.append("") + lines.append(f"{'Size':>10} | {'Symbol':<60} | Demangled") + lines.append("-" * 10 + "-+-" + "-" * 60 + "-+-" + "-" * 40) + + for symbol, demangled, size in sorted_symbols[:100]: # Top 100 + if symbol != demangled: + lines.append(f"{size:>10,} | {symbol[:60]:<60} | {demangled[:100]}") + else: + lines.append(f"{size:>10,} | {symbol[:60]:<60} | [not demangled]") + + if len(sorted_symbols) > 100: + lines.append(f"\n... and {len(sorted_symbols) - 100} more symbols") + + content = "\n".join(lines) + + if output_file: + with open(output_file, "w") as f: + f.write(content) + else: + print(content) + + +def analyze_elf( + elf_path: str, + objdump_path: str | None = None, + readelf_path: str | None = None, + detailed: bool = False, + external_components: set[str] | None = None, +) -> str: + """Analyze an ELF file and return a memory report.""" + analyzer = MemoryAnalyzer(elf_path, objdump_path, readelf_path, external_components) + analyzer.analyze() + return analyzer.generate_report(detailed) + + +if __name__ == "__main__": + import sys + + if len(sys.argv) < 2: + print("Usage: analyze_memory.py ") + sys.exit(1) + + try: + report = analyze_elf(sys.argv[1]) + print(report) + except Exception as e: + print(f"Error: {e}") + sys.exit(1) From c7c408e6670e5223cd4c7abf1be634926f7043cf Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 13:28:13 -1000 Subject: [PATCH 018/336] tweak --- .github/workflows/ci.yml | 36 +++++++ script/ci_memory_impact_comment.py | 147 ++++++++++++++++++++++++++++- 2 files changed, 182 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4682a05fea..d87945f8df 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -559,6 +559,24 @@ jobs: echo "Compiling $component for $platform using $test_file" python script/test_build_components.py -e compile -c "$component" -t "$platform" --no-grouping 2>&1 | \ python script/ci_memory_impact_extract.py --output-env + - name: Find and upload ELF file + run: | + # Find the most recently created .elf file in .esphome/build + elf_file=$(find ~/.esphome/build -name "*.elf" -type f -printf '%T@ %p\n' 2>/dev/null | sort -rn | head -1 | cut -d' ' -f2-) + if [ -n "$elf_file" ] && [ -f "$elf_file" ]; then + echo "Found ELF file: $elf_file" + mkdir -p ./elf-artifacts + cp "$elf_file" ./elf-artifacts/target.elf + else + echo "Warning: No ELF file found" + fi + - name: Upload ELF artifact + uses: actions/upload-artifact@ea05be8e2b5c27c5689e977ed6f65db0a051b1e5 # v4.6.0 + with: + name: memory-impact-target-elf + path: ./elf-artifacts/target.elf + if-no-files-found: warn + retention-days: 1 memory-impact-pr-branch: name: Build PR branch for memory impact @@ -594,6 +612,24 @@ jobs: echo "Compiling $component for $platform using $test_file" python script/test_build_components.py -e compile -c "$component" -t "$platform" --no-grouping 2>&1 | \ python script/ci_memory_impact_extract.py --output-env + - name: Find and upload ELF file + run: | + # Find the most recently created .elf file in .esphome/build + elf_file=$(find ~/.esphome/build -name "*.elf" -type f -printf '%T@ %p\n' 2>/dev/null | sort -rn | head -1 | cut -d' ' -f2-) + if [ -n "$elf_file" ] && [ -f "$elf_file" ]; then + echo "Found ELF file: $elf_file" + mkdir -p ./elf-artifacts + cp "$elf_file" ./elf-artifacts/pr.elf + else + echo "Warning: No ELF file found" + fi + - name: Upload ELF artifact + uses: actions/upload-artifact@ea05be8e2b5c27c5689e977ed6f65db0a051b1e5 # v4.6.0 + with: + name: memory-impact-pr-elf + path: ./elf-artifacts/pr.elf + if-no-files-found: warn + retention-days: 1 memory-impact-comment: name: Comment memory impact diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index e3e70d601f..f724a77c67 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -10,9 +10,16 @@ from __future__ import annotations import argparse import json +from pathlib import Path import subprocess import sys +# Add esphome to path for analyze_memory import +sys.path.insert(0, str(Path(__file__).parent.parent)) + +# pylint: disable=wrong-import-position +from esphome.analyze_memory import MemoryAnalyzer + # Comment marker to identify our memory impact comments COMMENT_MARKER = "" @@ -64,6 +71,105 @@ def format_change(before: int, after: int) -> str: return f"{emoji} {delta_str} ({pct_str})" +def run_detailed_analysis( + elf_path: str, objdump_path: str | None = None, readelf_path: str | None = None +) -> dict | None: + """Run detailed memory analysis on an ELF file. 
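+
+    Groups symbol sizes by component via MemoryAnalyzer; the result maps
+    component names to size buckets, e.g.
+    {"[esphome]api": {"flash_total": ..., "ram_total": ..., ...}}.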
+ + Args: + elf_path: Path to ELF file + objdump_path: Optional path to objdump tool + readelf_path: Optional path to readelf tool + + Returns: + Dictionary with component memory breakdown or None if analysis fails + """ + try: + analyzer = MemoryAnalyzer(elf_path, objdump_path, readelf_path) + components = analyzer.analyze() + + # Convert ComponentMemory objects to dictionaries + result = {} + for name, mem in components.items(): + result[name] = { + "text": mem.text_size, + "rodata": mem.rodata_size, + "data": mem.data_size, + "bss": mem.bss_size, + "flash_total": mem.flash_total, + "ram_total": mem.ram_total, + "symbol_count": mem.symbol_count, + } + return result + except Exception as e: + print(f"Warning: Failed to run detailed analysis: {e}", file=sys.stderr) + return None + + +def create_detailed_breakdown_table( + target_analysis: dict | None, pr_analysis: dict | None +) -> str: + """Create a markdown table showing detailed memory breakdown by component. + + Args: + target_analysis: Component memory breakdown for target branch + pr_analysis: Component memory breakdown for PR branch + + Returns: + Formatted markdown table + """ + if not target_analysis or not pr_analysis: + return "" + + # Combine all components from both analyses + all_components = set(target_analysis.keys()) | set(pr_analysis.keys()) + + # Filter to components that have changed or are significant + changed_components = [] + for comp in all_components: + target_mem = target_analysis.get(comp, {}) + pr_mem = pr_analysis.get(comp, {}) + + target_flash = target_mem.get("flash_total", 0) + pr_flash = pr_mem.get("flash_total", 0) + + # Include if component has changed or is significant (> 1KB) + if target_flash != pr_flash or target_flash > 1024 or pr_flash > 1024: + delta = pr_flash - target_flash + changed_components.append((comp, target_flash, pr_flash, delta)) + + if not changed_components: + return "" + + # Sort by absolute delta (largest changes first) + changed_components.sort(key=lambda x: abs(x[3]), reverse=True) + + # Build table - limit to top 20 changes + lines = [ + "", + "
", + "📊 Detailed Memory Breakdown (click to expand)", + "", + "| Component | Target Flash | PR Flash | Change |", + "|-----------|--------------|----------|--------|", + ] + + for comp, target_flash, pr_flash, delta in changed_components[:20]: + target_str = format_bytes(target_flash) + pr_str = format_bytes(pr_flash) + change_str = format_change(target_flash, pr_flash) + lines.append(f"| `{comp}` | {target_str} | {pr_str} | {change_str} |") + + if len(changed_components) > 20: + lines.append( + f"| ... | ... | ... | *({len(changed_components) - 20} more components not shown)* |" + ) + + lines.extend(["", "
", ""]) + + return "\n".join(lines) + + def create_comment_body( component: str, platform: str, @@ -71,6 +177,10 @@ def create_comment_body( target_flash: int, pr_ram: int, pr_flash: int, + target_elf: str | None = None, + pr_elf: str | None = None, + objdump_path: str | None = None, + readelf_path: str | None = None, ) -> str: """Create the comment body with memory impact analysis. @@ -81,6 +191,10 @@ def create_comment_body( target_flash: Flash usage in target branch pr_ram: RAM usage in PR branch pr_flash: Flash usage in PR branch + target_elf: Optional path to target branch ELF file + pr_elf: Optional path to PR branch ELF file + objdump_path: Optional path to objdump tool + readelf_path: Optional path to readelf tool Returns: Formatted comment body @@ -88,6 +202,25 @@ def create_comment_body( ram_change = format_change(target_ram, pr_ram) flash_change = format_change(target_flash, pr_flash) + # Run detailed analysis if ELF files are provided + target_analysis = None + pr_analysis = None + detailed_breakdown = "" + + if target_elf and pr_elf: + print( + f"Running detailed analysis on {target_elf} and {pr_elf}", file=sys.stderr + ) + target_analysis = run_detailed_analysis(target_elf, objdump_path, readelf_path) + pr_analysis = run_detailed_analysis(pr_elf, objdump_path, readelf_path) + + if target_analysis and pr_analysis: + detailed_breakdown = create_detailed_breakdown_table( + target_analysis, pr_analysis + ) + else: + print("No ELF files provided, skipping detailed analysis", file=sys.stderr) + return f"""{COMMENT_MARKER} ## Memory Impact Analysis @@ -98,7 +231,7 @@ def create_comment_body( |--------|--------------|---------|--------| | **RAM** | {format_bytes(target_ram)} | {format_bytes(pr_ram)} | {ram_change} | | **Flash** | {format_bytes(target_flash)} | {format_bytes(pr_flash)} | {flash_change} | - +{detailed_breakdown} --- *This analysis runs automatically when a single component changes. Memory usage is measured from a representative test configuration.* """ @@ -263,6 +396,14 @@ def main() -> int: parser.add_argument( "--pr-flash", type=int, required=True, help="PR branch flash usage" ) + parser.add_argument("--target-elf", help="Optional path to target branch ELF file") + parser.add_argument("--pr-elf", help="Optional path to PR branch ELF file") + parser.add_argument( + "--objdump-path", help="Optional path to objdump tool for detailed analysis" + ) + parser.add_argument( + "--readelf-path", help="Optional path to readelf tool for detailed analysis" + ) args = parser.parse_args() @@ -274,6 +415,10 @@ def main() -> int: target_flash=args.target_flash, pr_ram=args.pr_ram, pr_flash=args.pr_flash, + target_elf=args.target_elf, + pr_elf=args.pr_elf, + objdump_path=args.objdump_path, + readelf_path=args.readelf_path, ) # Post or update comment From 59848a2c8acbefc30ad944eeac11e23ca91b5824 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 13:31:04 -1000 Subject: [PATCH 019/336] tweak --- .github/workflows/ci.yml | 71 ++++++++++-- script/ci_memory_impact_comment.py | 177 +++++++++++++++++++++++++++-- 2 files changed, 229 insertions(+), 19 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d87945f8df..d5f9bdca13 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -561,14 +561,26 @@ jobs: python script/ci_memory_impact_extract.py --output-env - name: Find and upload ELF file run: | - # Find the most recently created .elf file in .esphome/build - elf_file=$(find ~/.esphome/build -name "*.elf" -type f -printf '%T@ %p\n' 2>/dev/null | sort -rn | head -1 | cut -d' ' -f2-) + # Find the ELF file - try both common locations + elf_file="" + + # Try .esphome/build first (default location) + if [ -d ~/.esphome/build ]; then + elf_file=$(find ~/.esphome/build -name "firmware.elf" -o -name "*.elf" | head -1) + fi + + # Fallback to finding in .platformio if not found + if [ -z "$elf_file" ] && [ -d ~/.platformio ]; then + elf_file=$(find ~/.platformio -name "firmware.elf" | head -1) + fi + if [ -n "$elf_file" ] && [ -f "$elf_file" ]; then echo "Found ELF file: $elf_file" mkdir -p ./elf-artifacts cp "$elf_file" ./elf-artifacts/target.elf else - echo "Warning: No ELF file found" + echo "Warning: No ELF file found in ~/.esphome/build or ~/.platformio" + ls -la ~/.esphome/build/ || true fi - name: Upload ELF artifact uses: actions/upload-artifact@ea05be8e2b5c27c5689e977ed6f65db0a051b1e5 # v4.6.0 @@ -614,14 +626,26 @@ jobs: python script/ci_memory_impact_extract.py --output-env - name: Find and upload ELF file run: | - # Find the most recently created .elf file in .esphome/build - elf_file=$(find ~/.esphome/build -name "*.elf" -type f -printf '%T@ %p\n' 2>/dev/null | sort -rn | head -1 | cut -d' ' -f2-) + # Find the ELF file - try both common locations + elf_file="" + + # Try .esphome/build first (default location) + if [ -d ~/.esphome/build ]; then + elf_file=$(find ~/.esphome/build -name "firmware.elf" -o -name "*.elf" | head -1) + fi + + # Fallback to finding in .platformio if not found + if [ -z "$elf_file" ] && [ -d ~/.platformio ]; then + elf_file=$(find ~/.platformio -name "firmware.elf" | head -1) + fi + if [ -n "$elf_file" ] && [ -f "$elf_file" ]; then echo "Found ELF file: $elf_file" mkdir -p ./elf-artifacts cp "$elf_file" ./elf-artifacts/pr.elf else - echo "Warning: No ELF file found" + echo "Warning: No ELF file found in ~/.esphome/build or ~/.platformio" + ls -la ~/.esphome/build/ || true fi - name: Upload ELF artifact uses: actions/upload-artifact@ea05be8e2b5c27c5689e977ed6f65db0a051b1e5 # v4.6.0 @@ -651,6 +675,18 @@ jobs: with: python-version: ${{ env.DEFAULT_PYTHON }} cache-key: ${{ needs.common.outputs.cache-key }} + - name: Download target ELF artifact + uses: actions/download-artifact@1a18f44933c290e06e7167a92071e78bb20ab94a # v4.4.2 + with: + name: memory-impact-target-elf + path: ./elf-artifacts/target + continue-on-error: true + - name: Download PR ELF artifact + uses: actions/download-artifact@1a18f44933c290e06e7167a92071e78bb20ab94a # v4.4.2 + with: + name: memory-impact-pr-elf + path: ./elf-artifacts/pr + continue-on-error: true - name: Post or update PR comment env: GH_TOKEN: ${{ github.token }} @@ -662,6 +698,25 @@ jobs: PR_FLASH: ${{ needs.memory-impact-pr-branch.outputs.flash_usage }} run: | . 
venv/bin/activate + + # Check if ELF files exist + target_elf_arg="" + pr_elf_arg="" + + if [ -f ./elf-artifacts/target/target.elf ]; then + echo "Found target ELF file" + target_elf_arg="--target-elf ./elf-artifacts/target/target.elf" + else + echo "No target ELF file found" + fi + + if [ -f ./elf-artifacts/pr/pr.elf ]; then + echo "Found PR ELF file" + pr_elf_arg="--pr-elf ./elf-artifacts/pr/pr.elf" + else + echo "No PR ELF file found" + fi + python script/ci_memory_impact_comment.py \ --pr-number "${{ github.event.pull_request.number }}" \ --component "$COMPONENT" \ @@ -669,7 +724,9 @@ jobs: --target-ram "$TARGET_RAM" \ --target-flash "$TARGET_FLASH" \ --pr-ram "$PR_RAM" \ - --pr-flash "$PR_FLASH" + --pr-flash "$PR_FLASH" \ + $target_elf_arg \ + $pr_elf_arg ci-status: name: CI Status diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index f724a77c67..0b3bf87590 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -73,7 +73,7 @@ def format_change(before: int, after: int) -> str: def run_detailed_analysis( elf_path: str, objdump_path: str | None = None, readelf_path: str | None = None -) -> dict | None: +) -> tuple[dict | None, dict | None]: """Run detailed memory analysis on an ELF file. Args: @@ -82,16 +82,18 @@ def run_detailed_analysis( readelf_path: Optional path to readelf tool Returns: - Dictionary with component memory breakdown or None if analysis fails + Tuple of (component_breakdown, symbol_map) or (None, None) if analysis fails + component_breakdown: Dictionary with component memory breakdown + symbol_map: Dictionary mapping symbol names to their sizes """ try: analyzer = MemoryAnalyzer(elf_path, objdump_path, readelf_path) components = analyzer.analyze() # Convert ComponentMemory objects to dictionaries - result = {} + component_result = {} for name, mem in components.items(): - result[name] = { + component_result[name] = { "text": mem.text_size, "rodata": mem.rodata_size, "data": mem.data_size, @@ -100,10 +102,151 @@ def run_detailed_analysis( "ram_total": mem.ram_total, "symbol_count": mem.symbol_count, } - return result + + # Build symbol map from all sections + symbol_map = {} + for section in analyzer.sections.values(): + for symbol_name, size, _ in section.symbols: + if size > 0: # Only track non-zero sized symbols + # Demangle the symbol for better readability + demangled = analyzer._demangle_symbol(symbol_name) + symbol_map[demangled] = size + + return component_result, symbol_map except Exception as e: print(f"Warning: Failed to run detailed analysis: {e}", file=sys.stderr) - return None + import traceback + + traceback.print_exc(file=sys.stderr) + return None, None + + +def create_symbol_changes_table( + target_symbols: dict | None, pr_symbols: dict | None +) -> str: + """Create a markdown table showing symbols that changed size. 
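+
+    Symbols are bucketed into changed, new, and removed groups and rendered as
+    markdown tables (top 30 changed, top 15 new/removed) inside a collapsible
+    section.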
+
+    Args:
+        target_symbols: Symbol name to size mapping for target branch
+        pr_symbols: Symbol name to size mapping for PR branch
+
+    Returns:
+        Formatted markdown table
+    """
+    if not target_symbols or not pr_symbols:
+        return ""
+
+    # Find all symbols that exist in both branches or only in one
+    all_symbols = set(target_symbols.keys()) | set(pr_symbols.keys())
+
+    # Track changes
+    changed_symbols = []
+    new_symbols = []
+    removed_symbols = []
+
+    for symbol in all_symbols:
+        target_size = target_symbols.get(symbol, 0)
+        pr_size = pr_symbols.get(symbol, 0)
+
+        if target_size == 0 and pr_size > 0:
+            # New symbol
+            new_symbols.append((symbol, pr_size))
+        elif target_size > 0 and pr_size == 0:
+            # Removed symbol
+            removed_symbols.append((symbol, target_size))
+        elif target_size != pr_size:
+            # Changed symbol
+            delta = pr_size - target_size
+            changed_symbols.append((symbol, target_size, pr_size, delta))
+
+    if not changed_symbols and not new_symbols and not removed_symbols:
+        return ""
+
+    lines = [
+        "",
+        "<details>
", + "🔍 Symbol-Level Changes (click to expand)", + "", + ] + + # Show changed symbols (sorted by absolute delta) + if changed_symbols: + changed_symbols.sort(key=lambda x: abs(x[3]), reverse=True) + lines.extend( + [ + "### Changed Symbols", + "", + "| Symbol | Target Size | PR Size | Change |", + "|--------|-------------|---------|--------|", + ] + ) + + # Show top 30 changes + for symbol, target_size, pr_size, delta in changed_symbols[:30]: + target_str = format_bytes(target_size) + pr_str = format_bytes(pr_size) + change_str = format_change(target_size, pr_size) + # Truncate very long symbol names + display_symbol = symbol if len(symbol) <= 80 else symbol[:77] + "..." + lines.append( + f"| `{display_symbol}` | {target_str} | {pr_str} | {change_str} |" + ) + + if len(changed_symbols) > 30: + lines.append( + f"| ... | ... | ... | *({len(changed_symbols) - 30} more changed symbols not shown)* |" + ) + lines.append("") + + # Show new symbols + if new_symbols: + new_symbols.sort(key=lambda x: x[1], reverse=True) + lines.extend( + [ + "### New Symbols (top 15)", + "", + "| Symbol | Size |", + "|--------|------|", + ] + ) + + for symbol, size in new_symbols[:15]: + display_symbol = symbol if len(symbol) <= 80 else symbol[:77] + "..." + lines.append(f"| `{display_symbol}` | {format_bytes(size)} |") + + if len(new_symbols) > 15: + total_new_size = sum(s[1] for s in new_symbols) + lines.append( + f"| *{len(new_symbols) - 15} more new symbols...* | *Total: {format_bytes(total_new_size)}* |" + ) + lines.append("") + + # Show removed symbols + if removed_symbols: + removed_symbols.sort(key=lambda x: x[1], reverse=True) + lines.extend( + [ + "### Removed Symbols (top 15)", + "", + "| Symbol | Size |", + "|--------|------|", + ] + ) + + for symbol, size in removed_symbols[:15]: + display_symbol = symbol if len(symbol) <= 80 else symbol[:77] + "..." + lines.append(f"| `{display_symbol}` | {format_bytes(size)} |") + + if len(removed_symbols) > 15: + total_removed_size = sum(s[1] for s in removed_symbols) + lines.append( + f"| *{len(removed_symbols) - 15} more removed symbols...* | *Total: {format_bytes(total_removed_size)}* |" + ) + lines.append("") + + lines.extend(["
", ""]) + + return "\n".join(lines) def create_detailed_breakdown_table( @@ -148,7 +291,7 @@ def create_detailed_breakdown_table( lines = [ "", "
", - "📊 Detailed Memory Breakdown (click to expand)", + "📊 Component Memory Breakdown (click to expand)", "", "| Component | Target Flash | PR Flash | Change |", "|-----------|--------------|----------|--------|", @@ -205,19 +348,29 @@ def create_comment_body( # Run detailed analysis if ELF files are provided target_analysis = None pr_analysis = None - detailed_breakdown = "" + target_symbols = None + pr_symbols = None + component_breakdown = "" + symbol_changes = "" if target_elf and pr_elf: print( f"Running detailed analysis on {target_elf} and {pr_elf}", file=sys.stderr ) - target_analysis = run_detailed_analysis(target_elf, objdump_path, readelf_path) - pr_analysis = run_detailed_analysis(pr_elf, objdump_path, readelf_path) + target_analysis, target_symbols = run_detailed_analysis( + target_elf, objdump_path, readelf_path + ) + pr_analysis, pr_symbols = run_detailed_analysis( + pr_elf, objdump_path, readelf_path + ) if target_analysis and pr_analysis: - detailed_breakdown = create_detailed_breakdown_table( + component_breakdown = create_detailed_breakdown_table( target_analysis, pr_analysis ) + + if target_symbols and pr_symbols: + symbol_changes = create_symbol_changes_table(target_symbols, pr_symbols) else: print("No ELF files provided, skipping detailed analysis", file=sys.stderr) @@ -231,7 +384,7 @@ def create_comment_body( |--------|--------------|---------|--------| | **RAM** | {format_bytes(target_ram)} | {format_bytes(pr_ram)} | {ram_change} | | **Flash** | {format_bytes(target_flash)} | {format_bytes(pr_flash)} | {flash_change} | -{detailed_breakdown} +{component_breakdown}{symbol_changes} --- *This analysis runs automatically when a single component changes. Memory usage is measured from a representative test configuration.* """ From 9d081795e8b64451cbe1533638f7dd5501435c3b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 13:41:55 -1000 Subject: [PATCH 020/336] relo --- .github/workflows/ci.yml | 4 +- .../__init__.py} | 859 +----------------- esphome/analyze_memory/const.py | 857 +++++++++++++++++ script/ci_memory_impact_comment.py | 2 +- 4 files changed, 864 insertions(+), 858 deletions(-) rename esphome/{analyze_memory.py => analyze_memory/__init__.py} (56%) create mode 100644 esphome/analyze_memory/const.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d5f9bdca13..6fa8150b93 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -583,7 +583,7 @@ jobs: ls -la ~/.esphome/build/ || true fi - name: Upload ELF artifact - uses: actions/upload-artifact@ea05be8e2b5c27c5689e977ed6f65db0a051b1e5 # v4.6.0 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: memory-impact-target-elf path: ./elf-artifacts/target.elf @@ -648,7 +648,7 @@ jobs: ls -la ~/.esphome/build/ || true fi - name: Upload ELF artifact - uses: actions/upload-artifact@ea05be8e2b5c27c5689e977ed6f65db0a051b1e5 # v4.6.0 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: memory-impact-pr-elf path: ./elf-artifacts/pr.elf diff --git a/esphome/analyze_memory.py b/esphome/analyze_memory/__init__.py similarity index 56% rename from esphome/analyze_memory.py rename to esphome/analyze_memory/__init__.py index 70c324b33f..c6fdb1028d 100644 --- a/esphome/analyze_memory.py +++ b/esphome/analyze_memory/__init__.py @@ -7,862 +7,10 @@ from pathlib import Path import re import subprocess +from .const import DEMANGLED_PATTERNS, ESPHOME_COMPONENT_PATTERN, SYMBOL_PATTERNS + _LOGGER = logging.getLogger(__name__) -# Pattern to extract ESPHome component namespaces dynamically -ESPHOME_COMPONENT_PATTERN = re.compile(r"esphome::([a-zA-Z0-9_]+)::") - -# Component identification rules -# Symbol patterns: patterns found in raw symbol names -SYMBOL_PATTERNS = { - "freertos": [ - "vTask", - "xTask", - "xQueue", - "pvPort", - "vPort", - "uxTask", - "pcTask", - "prvTimerTask", - "prvAddNewTaskToReadyList", - "pxReadyTasksLists", - "prvAddCurrentTaskToDelayedList", - "xEventGroupWaitBits", - "xRingbufferSendFromISR", - "prvSendItemDoneNoSplit", - "prvReceiveGeneric", - "prvSendAcquireGeneric", - "prvCopyItemAllowSplit", - "xEventGroup", - "xRingbuffer", - "prvSend", - "prvReceive", - "prvCopy", - "xPort", - "ulTaskGenericNotifyTake", - "prvIdleTask", - "prvInitialiseNewTask", - "prvIsYieldRequiredSMP", - "prvGetItemByteBuf", - "prvInitializeNewRingbuffer", - "prvAcquireItemNoSplit", - "prvNotifyQueueSetContainer", - "ucStaticTimerQueueStorage", - "eTaskGetState", - "main_task", - "do_system_init_fn", - "xSemaphoreCreateGenericWithCaps", - "vListInsert", - "uxListRemove", - "vRingbufferReturnItem", - "vRingbufferReturnItemFromISR", - "prvCheckItemFitsByteBuffer", - "prvGetCurMaxSizeAllowSplit", - "tick_hook", - "sys_sem_new", - "sys_arch_mbox_fetch", - "sys_arch_sem_wait", - "prvDeleteTCB", - "vQueueDeleteWithCaps", - "vRingbufferDeleteWithCaps", - "vSemaphoreDeleteWithCaps", - "prvCheckItemAvail", - "prvCheckTaskCanBeScheduledSMP", - "prvGetCurMaxSizeNoSplit", - "prvResetNextTaskUnblockTime", - "prvReturnItemByteBuf", - "vApplicationStackOverflowHook", - "vApplicationGetIdleTaskMemory", - "sys_init", - "sys_mbox_new", - "sys_arch_mbox_tryfetch", - ], - "xtensa": ["xt_", "_xt_", "xPortEnterCriticalTimeout"], - "heap": ["heap_", "multi_heap"], - "spi_flash": ["spi_flash"], - "rtc": ["rtc_", "rtcio_ll_"], - "gpio_driver": 
["gpio_", "pins"], - "uart_driver": ["uart", "_uart", "UART"], - "timer": ["timer_", "esp_timer"], - "peripherals": ["periph_", "periman"], - "network_stack": [ - "vj_compress", - "raw_sendto", - "raw_input", - "etharp_", - "icmp_input", - "socket_ipv6", - "ip_napt", - "socket_ipv4_multicast", - "socket_ipv6_multicast", - "netconn_", - "recv_raw", - "accept_function", - "netconn_recv_data", - "netconn_accept", - "netconn_write_vectors_partly", - "netconn_drain", - "raw_connect", - "raw_bind", - "icmp_send_response", - "sockets", - "icmp_dest_unreach", - "inet_chksum_pseudo", - "alloc_socket", - "done_socket", - "set_global_fd_sets", - "inet_chksum_pbuf", - "tryget_socket_unconn_locked", - "tryget_socket_unconn", - "cs_create_ctrl_sock", - "netbuf_alloc", - ], - "ipv6_stack": ["nd6_", "ip6_", "mld6_", "icmp6_", "icmp6_input"], - "wifi_stack": [ - "ieee80211", - "hostap", - "sta_", - "ap_", - "scan_", - "wifi_", - "wpa_", - "wps_", - "esp_wifi", - "cnx_", - "wpa3_", - "sae_", - "wDev_", - "ic_", - "mac_", - "esf_buf", - "gWpaSm", - "sm_WPA", - "eapol_", - "owe_", - "wifiLowLevelInit", - "s_do_mapping", - "gScanStruct", - "ppSearchTxframe", - "ppMapWaitTxq", - "ppFillAMPDUBar", - "ppCheckTxConnTrafficIdle", - "ppCalTkipMic", - ], - "bluetooth": ["bt_", "ble_", "l2c_", "gatt_", "gap_", "hci_", "BT_init"], - "wifi_bt_coex": ["coex"], - "bluetooth_rom": ["r_ble", "r_lld", "r_llc", "r_llm"], - "bluedroid_bt": [ - "bluedroid", - "btc_", - "bta_", - "btm_", - "btu_", - "BTM_", - "GATT", - "L2CA_", - "smp_", - "gatts_", - "attp_", - "l2cu_", - "l2cb", - "smp_cb", - "BTA_GATTC_", - "SMP_", - "BTU_", - "BTA_Dm", - "GAP_Ble", - "BT_tx_if", - "host_recv_pkt_cb", - "saved_local_oob_data", - "string_to_bdaddr", - "string_is_bdaddr", - "CalConnectParamTimeout", - "transmit_fragment", - "transmit_data", - "event_command_ready", - "read_command_complete_header", - "parse_read_local_extended_features_response", - "parse_read_local_version_info_response", - "should_request_high", - "btdm_wakeup_request", - "BTA_SetAttributeValue", - "BTA_EnableBluetooth", - "transmit_command_futured", - "transmit_command", - "get_waiting_command", - "make_command", - "transmit_downward", - "host_recv_adv_packet", - "copy_extra_byte_in_db", - "parse_read_local_supported_commands_response", - ], - "crypto_math": [ - "ecp_", - "bignum_", - "mpi_", - "sswu", - "modp", - "dragonfly_", - "gcm_mult", - "__multiply", - "quorem", - "__mdiff", - "__lshift", - "__mprec_tens", - "ECC_", - "multiprecision_", - "mix_sub_columns", - "sbox", - "gfm2_sbox", - "gfm3_sbox", - "curve_p256", - "curve", - "p_256_init_curve", - "shift_sub_rows", - "rshift", - ], - "hw_crypto": ["esp_aes", "esp_sha", "esp_rsa", "esp_bignum", "esp_mpi"], - "libc": [ - "printf", - "scanf", - "malloc", - "free", - "memcpy", - "memset", - "strcpy", - "strlen", - "_dtoa", - "_fopen", - "__sfvwrite_r", - "qsort", - "__sf", - "__sflush_r", - "__srefill_r", - "_impure_data", - "_reclaim_reent", - "_open_r", - "strncpy", - "_strtod_l", - "__gethex", - "__hexnan", - "_setenv_r", - "_tzset_unlocked_r", - "__tzcalc_limits", - "select", - "scalbnf", - "strtof", - "strtof_l", - "__d2b", - "__b2d", - "__s2b", - "_Balloc", - "__multadd", - "__lo0bits", - "__atexit0", - "__smakebuf_r", - "__swhatbuf_r", - "_sungetc_r", - "_close_r", - "_link_r", - "_unsetenv_r", - "_rename_r", - "__month_lengths", - "tzinfo", - "__ratio", - "__hi0bits", - "__ulp", - "__any_on", - "__copybits", - "L_shift", - "_fcntl_r", - "_lseek_r", - "_read_r", - "_write_r", - "_unlink_r", - "_fstat_r", - "access", 
- "fsync", - "tcsetattr", - "tcgetattr", - "tcflush", - "tcdrain", - "__ssrefill_r", - "_stat_r", - "__hexdig_fun", - "__mcmp", - "_fwalk_sglue", - "__fpclassifyf", - "_setlocale_r", - "_mbrtowc_r", - "fcntl", - "__match", - "_lock_close", - "__c$", - "__func__$", - "__FUNCTION__$", - "DAYS_IN_MONTH", - "_DAYS_BEFORE_MONTH", - "CSWTCH$", - "dst$", - "sulp", - ], - "string_ops": ["strcmp", "strncmp", "strchr", "strstr", "strtok", "strdup"], - "memory_alloc": ["malloc", "calloc", "realloc", "free", "_sbrk"], - "file_io": [ - "fread", - "fwrite", - "fopen", - "fclose", - "fseek", - "ftell", - "fflush", - "s_fd_table", - ], - "string_formatting": [ - "snprintf", - "vsnprintf", - "sprintf", - "vsprintf", - "sscanf", - "vsscanf", - ], - "cpp_anonymous": ["_GLOBAL__N_", "n$"], - "cpp_runtime": ["__cxx", "_ZN", "_ZL", "_ZSt", "__gxx_personality", "_Z16"], - "exception_handling": ["__cxa_", "_Unwind_", "__gcc_personality", "uw_frame_state"], - "static_init": ["_GLOBAL__sub_I_"], - "mdns_lib": ["mdns"], - "phy_radio": [ - "phy_", - "rf_", - "chip_", - "register_chipv7", - "pbus_", - "bb_", - "fe_", - "rfcal_", - "ram_rfcal", - "tx_pwctrl", - "rx_chan", - "set_rx_gain", - "set_chan", - "agc_reg", - "ram_txiq", - "ram_txdc", - "ram_gen_rx_gain", - "rx_11b_opt", - "set_rx_sense", - "set_rx_gain_cal", - "set_chan_dig_gain", - "tx_pwctrl_init_cal", - "rfcal_txiq", - "set_tx_gain_table", - "correct_rfpll_offset", - "pll_correct_dcap", - "txiq_cal_init", - "pwdet_sar", - "pwdet_sar2_init", - "ram_iq_est_enable", - "ram_rfpll_set_freq", - "ant_wifirx_cfg", - "ant_btrx_cfg", - "force_txrxoff", - "force_txrx_off", - "tx_paon_set", - "opt_11b_resart", - "rfpll_1p2_opt", - "ram_dc_iq_est", - "ram_start_tx_tone", - "ram_en_pwdet", - "ram_cbw2040_cfg", - "rxdc_est_min", - "i2cmst_reg_init", - "temprature_sens_read", - "ram_restart_cal", - "ram_write_gain_mem", - "ram_wait_rfpll_cal_end", - "txcal_debuge_mode", - "ant_wifitx_cfg", - "reg_init_begin", - ], - "wifi_phy_pp": ["pp_", "ppT", "ppR", "ppP", "ppInstall", "ppCalTxAMPDULength"], - "wifi_lmac": ["lmac"], - "wifi_device": ["wdev", "wDev_"], - "power_mgmt": [ - "pm_", - "sleep", - "rtc_sleep", - "light_sleep", - "deep_sleep", - "power_down", - "g_pm", - ], - "memory_mgmt": [ - "mem_", - "memory_", - "tlsf_", - "memp_", - "pbuf_", - "pbuf_alloc", - "pbuf_copy_partial_pbuf", - ], - "hal_layer": ["hal_"], - "clock_mgmt": [ - "clk_", - "clock_", - "rtc_clk", - "apb_", - "cpu_freq", - "setCpuFrequencyMhz", - ], - "cache_mgmt": ["cache"], - "flash_ops": ["flash", "image_load"], - "interrupt_handlers": [ - "isr", - "interrupt", - "intr_", - "exc_", - "exception", - "port_IntStack", - ], - "wrapper_functions": ["_wrapper"], - "error_handling": ["panic", "abort", "assert", "error_", "fault"], - "authentication": ["auth"], - "ppp_protocol": ["ppp", "ipcp_", "lcp_", "chap_", "LcpEchoCheck"], - "dhcp": ["dhcp", "handle_dhcp"], - "ethernet_phy": [ - "emac_", - "eth_phy_", - "phy_tlk110", - "phy_lan87", - "phy_ip101", - "phy_rtl", - "phy_dp83", - "phy_ksz", - "lan87xx_", - "rtl8201_", - "ip101_", - "ksz80xx_", - "jl1101_", - "dp83848_", - "eth_on_state_changed", - ], - "threading": ["pthread_", "thread_", "_task_"], - "pthread": ["pthread"], - "synchronization": ["mutex", "semaphore", "spinlock", "portMUX"], - "math_lib": [ - "sin", - "cos", - "tan", - "sqrt", - "pow", - "exp", - "log", - "atan", - "asin", - "acos", - "floor", - "ceil", - "fabs", - "round", - ], - "random": ["rand", "random", "rng_", "prng"], - "time_lib": [ - "time", - "clock", - "gettimeofday", - 
"settimeofday", - "localtime", - "gmtime", - "mktime", - "strftime", - ], - "console_io": ["console_", "uart_tx", "uart_rx", "puts", "putchar", "getchar"], - "rom_functions": ["r_", "rom_"], - "compiler_runtime": [ - "__divdi3", - "__udivdi3", - "__moddi3", - "__muldi3", - "__ashldi3", - "__ashrdi3", - "__lshrdi3", - "__cmpdi2", - "__fixdfdi", - "__floatdidf", - ], - "libgcc": ["libgcc", "_divdi3", "_udivdi3"], - "boot_startup": ["boot", "start_cpu", "call_start", "startup", "bootloader"], - "bootloader": ["bootloader_", "esp_bootloader"], - "app_framework": ["app_", "initArduino", "setup", "loop", "Update"], - "weak_symbols": ["__weak_"], - "compiler_builtins": ["__builtin_"], - "vfs": ["vfs_", "VFS"], - "esp32_sdk": ["esp32_", "esp32c", "esp32s"], - "usb": ["usb_", "USB", "cdc_", "CDC"], - "i2c_driver": ["i2c_", "I2C"], - "i2s_driver": ["i2s_", "I2S"], - "spi_driver": ["spi_", "SPI"], - "adc_driver": ["adc_", "ADC"], - "dac_driver": ["dac_", "DAC"], - "touch_driver": ["touch_", "TOUCH"], - "pwm_driver": ["pwm_", "PWM", "ledc_", "LEDC"], - "rmt_driver": ["rmt_", "RMT"], - "pcnt_driver": ["pcnt_", "PCNT"], - "can_driver": ["can_", "CAN", "twai_", "TWAI"], - "sdmmc_driver": ["sdmmc_", "SDMMC", "sdcard", "sd_card"], - "temp_sensor": ["temp_sensor", "tsens_"], - "watchdog": ["wdt_", "WDT", "watchdog"], - "brownout": ["brownout", "bod_"], - "ulp": ["ulp_", "ULP"], - "psram": ["psram", "PSRAM", "spiram", "SPIRAM"], - "efuse": ["efuse", "EFUSE"], - "partition": ["partition", "esp_partition"], - "esp_event": ["esp_event", "event_loop", "event_callback"], - "esp_console": ["esp_console", "console_"], - "chip_specific": ["chip_", "esp_chip"], - "esp_system_utils": ["esp_system", "esp_hw", "esp_clk", "esp_sleep"], - "ipc": ["esp_ipc", "ipc_"], - "wifi_config": [ - "g_cnxMgr", - "gChmCxt", - "g_ic", - "TxRxCxt", - "s_dp", - "s_ni", - "s_reg_dump", - "packet$", - "d_mult_table", - "K", - "fcstab", - ], - "smartconfig": ["sc_ack_send"], - "rc_calibration": ["rc_cal", "rcUpdate"], - "noise_floor": ["noise_check"], - "rf_calibration": [ - "set_rx_sense", - "set_rx_gain_cal", - "set_chan_dig_gain", - "tx_pwctrl_init_cal", - "rfcal_txiq", - "set_tx_gain_table", - "correct_rfpll_offset", - "pll_correct_dcap", - "txiq_cal_init", - "pwdet_sar", - "rx_11b_opt", - ], - "wifi_crypto": [ - "pk_use_ecparams", - "process_segments", - "ccmp_", - "rc4_", - "aria_", - "mgf_mask", - "dh_group", - "ccmp_aad_nonce", - "ccmp_encrypt", - "rc4_skip", - "aria_sb1", - "aria_sb2", - "aria_is1", - "aria_is2", - "aria_sl", - "aria_a", - ], - "radio_control": ["fsm_input", "fsm_sconfreq"], - "pbuf": [ - "pbuf_", - ], - "event_group": ["xEventGroup"], - "ringbuffer": ["xRingbuffer", "prvSend", "prvReceive", "prvCopy"], - "provisioning": ["prov_", "prov_stop_and_notify"], - "scan": ["gScanStruct"], - "port": ["xPort"], - "elf_loader": [ - "elf_add", - "elf_add_note", - "elf_add_segment", - "process_image", - "read_encoded", - "read_encoded_value", - "read_encoded_value_with_base", - "process_image_header", - ], - "socket_api": [ - "sockets", - "netconn_", - "accept_function", - "recv_raw", - "socket_ipv4_multicast", - "socket_ipv6_multicast", - ], - "igmp": ["igmp_", "igmp_send", "igmp_input"], - "icmp6": ["icmp6_"], - "arp": ["arp_table"], - "ampdu": [ - "ampdu_", - "rcAmpdu", - "trc_onAmpduOp", - "rcAmpduLowerRate", - "ampdu_dispatch_upto", - ], - "ieee802_11": ["ieee802_11_", "ieee802_11_parse_elems"], - "rate_control": ["rssi_margin", "rcGetSched", "get_rate_fcc_index"], - "nan": ["nan_dp_", "nan_dp_post_tx", 
"nan_dp_delete_peer"], - "channel_mgmt": ["chm_init", "chm_set_current_channel"], - "trace": ["trc_init", "trc_onAmpduOp"], - "country_code": ["country_info", "country_info_24ghz"], - "multicore": ["do_multicore_settings"], - "Update_lib": ["Update"], - "stdio": [ - "__sf", - "__sflush_r", - "__srefill_r", - "_impure_data", - "_reclaim_reent", - "_open_r", - ], - "strncpy_ops": ["strncpy"], - "math_internal": ["__mdiff", "__lshift", "__mprec_tens", "quorem"], - "character_class": ["__chclass"], - "camellia": ["camellia_", "camellia_feistel"], - "crypto_tables": ["FSb", "FSb2", "FSb3", "FSb4"], - "event_buffer": ["g_eb_list_desc", "eb_space"], - "base_node": ["base_node_", "base_node_add_handler"], - "file_descriptor": ["s_fd_table"], - "tx_delay": ["tx_delay_cfg"], - "deinit": ["deinit_functions"], - "lcp_echo": ["LcpEchoCheck"], - "raw_api": ["raw_bind", "raw_connect"], - "checksum": ["process_checksum"], - "entry_management": ["add_entry"], - "esp_ota": ["esp_ota", "ota_", "read_otadata"], - "http_server": [ - "httpd_", - "parse_url_char", - "cb_headers_complete", - "delete_entry", - "validate_structure", - "config_save", - "config_new", - "verify_url", - "cb_url", - ], - "misc_system": [ - "alarm_cbs", - "start_up", - "tokens", - "unhex", - "osi_funcs_ro", - "enum_function", - "fragment_and_dispatch", - "alarm_set", - "osi_alarm_new", - "config_set_string", - "config_update_newest_section", - "config_remove_key", - "method_strings", - "interop_match", - "interop_database", - "__state_table", - "__action_table", - "s_stub_table", - "s_context", - "s_mmu_ctx", - "s_get_bus_mask", - "hli_queue_put", - "list_remove", - "list_delete", - "lock_acquire_generic", - "is_vect_desc_usable", - "io_mode_str", - "__c$20233", - "interface", - "read_id_core", - "subscribe_idle", - "unsubscribe_idle", - "s_clkout_handle", - "lock_release_generic", - "config_set_int", - "config_get_int", - "config_get_string", - "config_has_key", - "config_remove_section", - "osi_alarm_init", - "osi_alarm_deinit", - "fixed_queue_enqueue", - "fixed_queue_dequeue", - "fixed_queue_new", - "fixed_pkt_queue_enqueue", - "fixed_pkt_queue_new", - "list_append", - "list_prepend", - "list_insert_after", - "list_contains", - "list_get_node", - "hash_function_blob", - "cb_no_body", - "cb_on_body", - "profile_tab", - "get_arg", - "trim", - "buf$", - "process_appended_hash_and_sig$constprop$0", - "uuidType", - "allocate_svc_db_buf", - "_hostname_is_ours", - "s_hli_handlers", - "tick_cb", - "idle_cb", - "input", - "entry_find", - "section_find", - "find_bucket_entry_", - "config_has_section", - "hli_queue_create", - "hli_queue_get", - "hli_c_handler", - "future_ready", - "future_await", - "future_new", - "pkt_queue_enqueue", - "pkt_queue_dequeue", - "pkt_queue_cleanup", - "pkt_queue_create", - "pkt_queue_destroy", - "fixed_pkt_queue_dequeue", - "osi_alarm_cancel", - "osi_alarm_is_active", - "osi_sem_take", - "osi_event_create", - "osi_event_bind", - "alarm_cb_handler", - "list_foreach", - "list_back", - "list_front", - "list_clear", - "fixed_queue_try_peek_first", - "translate_path", - "get_idx", - "find_key", - "init", - "end", - "start", - "set_read_value", - "copy_address_list", - "copy_and_key", - "sdk_cfg_opts", - "leftshift_onebit", - "config_section_end", - "config_section_begin", - "find_entry_and_check_all_reset", - "image_validate", - "xPendingReadyList", - "vListInitialise", - "lock_init_generic", - "ant_bttx_cfg", - "ant_dft_cfg", - "cs_send_to_ctrl_sock", - "config_llc_util_funcs_reset", - 
"make_set_adv_report_flow_control", - "make_set_event_mask", - "raw_new", - "raw_remove", - "BTE_InitStack", - "parse_read_local_supported_features_response", - "__math_invalidf", - "tinytens", - "__mprec_tinytens", - "__mprec_bigtens", - "vRingbufferDelete", - "vRingbufferDeleteWithCaps", - "vRingbufferReturnItem", - "vRingbufferReturnItemFromISR", - "get_acl_data_size_ble", - "get_features_ble", - "get_features_classic", - "get_acl_packet_size_ble", - "get_acl_packet_size_classic", - "supports_extended_inquiry_response", - "supports_rssi_with_inquiry_results", - "supports_interlaced_inquiry_scan", - "supports_reading_remote_extended_features", - ], - "bluetooth_ll": [ - "lld_pdu_", - "ld_acl_", - "lld_stop_ind_handler", - "lld_evt_winsize_change", - "config_lld_evt_funcs_reset", - "config_lld_funcs_reset", - "config_llm_funcs_reset", - "llm_set_long_adv_data", - "lld_retry_tx_prog", - "llc_link_sup_to_ind_handler", - "config_llc_funcs_reset", - "lld_evt_rxwin_compute", - "config_btdm_funcs_reset", - "config_ea_funcs_reset", - "llc_defalut_state_tab_reset", - "config_rwip_funcs_reset", - "ke_lmp_rx_flooding_detect", - ], -} - -# Demangled patterns: patterns found in demangled C++ names -DEMANGLED_PATTERNS = { - "gpio_driver": ["GPIO"], - "uart_driver": ["UART"], - "network_stack": [ - "lwip", - "tcp", - "udp", - "ip4", - "ip6", - "dhcp", - "dns", - "netif", - "ethernet", - "ppp", - "slip", - ], - "wifi_stack": ["NetworkInterface"], - "nimble_bt": [ - "nimble", - "NimBLE", - "ble_hs", - "ble_gap", - "ble_gatt", - "ble_att", - "ble_l2cap", - "ble_sm", - ], - "crypto": ["mbedtls", "crypto", "sha", "aes", "rsa", "ecc", "tls", "ssl"], - "cpp_stdlib": ["std::", "__gnu_cxx::", "__cxxabiv"], - "static_init": ["__static_initialization"], - "rtti": ["__type_info", "__class_type_info"], - "web_server_lib": ["AsyncWebServer", "AsyncWebHandler", "WebServer"], - "async_tcp": ["AsyncClient", "AsyncServer"], - "mdns_lib": ["mdns"], - "json_lib": [ - "ArduinoJson", - "JsonDocument", - "JsonArray", - "JsonObject", - "deserialize", - "serialize", - ], - "http_lib": ["HTTP", "http_", "Request", "Response", "Uri", "WebSocket"], - "logging": ["log", "Log", "print", "Print", "diag_"], - "authentication": ["checkDigestAuthentication"], - "libgcc": ["libgcc"], - "esp_system": ["esp_", "ESP"], - "arduino": ["arduino"], - "nvs": ["nvs_", "_ZTVN3nvs", "nvs::"], - "filesystem": ["spiffs", "vfs"], - "libc": ["newlib"], -} - # Get the list of actual ESPHome components by scanning the components directory def get_esphome_components(): @@ -870,7 +18,8 @@ def get_esphome_components(): components = set() # Find the components directory relative to this file - current_dir = Path(__file__).parent + # Go up two levels from analyze_memory/__init__.py to esphome/ + current_dir = Path(__file__).parent.parent components_dir = current_dir / "components" if components_dir.exists() and components_dir.is_dir(): diff --git a/esphome/analyze_memory/const.py b/esphome/analyze_memory/const.py new file mode 100644 index 0000000000..68cd957090 --- /dev/null +++ b/esphome/analyze_memory/const.py @@ -0,0 +1,857 @@ +"""Constants for memory analysis symbol pattern matching.""" + +import re + +# Pattern to extract ESPHome component namespaces dynamically +ESPHOME_COMPONENT_PATTERN = re.compile(r"esphome::([a-zA-Z0-9_]+)::") + +# Component identification rules +# Symbol patterns: patterns found in raw symbol names +SYMBOL_PATTERNS = { + "freertos": [ + "vTask", + "xTask", + "xQueue", + "pvPort", + "vPort", + "uxTask", + "pcTask", + 
"prvTimerTask", + "prvAddNewTaskToReadyList", + "pxReadyTasksLists", + "prvAddCurrentTaskToDelayedList", + "xEventGroupWaitBits", + "xRingbufferSendFromISR", + "prvSendItemDoneNoSplit", + "prvReceiveGeneric", + "prvSendAcquireGeneric", + "prvCopyItemAllowSplit", + "xEventGroup", + "xRingbuffer", + "prvSend", + "prvReceive", + "prvCopy", + "xPort", + "ulTaskGenericNotifyTake", + "prvIdleTask", + "prvInitialiseNewTask", + "prvIsYieldRequiredSMP", + "prvGetItemByteBuf", + "prvInitializeNewRingbuffer", + "prvAcquireItemNoSplit", + "prvNotifyQueueSetContainer", + "ucStaticTimerQueueStorage", + "eTaskGetState", + "main_task", + "do_system_init_fn", + "xSemaphoreCreateGenericWithCaps", + "vListInsert", + "uxListRemove", + "vRingbufferReturnItem", + "vRingbufferReturnItemFromISR", + "prvCheckItemFitsByteBuffer", + "prvGetCurMaxSizeAllowSplit", + "tick_hook", + "sys_sem_new", + "sys_arch_mbox_fetch", + "sys_arch_sem_wait", + "prvDeleteTCB", + "vQueueDeleteWithCaps", + "vRingbufferDeleteWithCaps", + "vSemaphoreDeleteWithCaps", + "prvCheckItemAvail", + "prvCheckTaskCanBeScheduledSMP", + "prvGetCurMaxSizeNoSplit", + "prvResetNextTaskUnblockTime", + "prvReturnItemByteBuf", + "vApplicationStackOverflowHook", + "vApplicationGetIdleTaskMemory", + "sys_init", + "sys_mbox_new", + "sys_arch_mbox_tryfetch", + ], + "xtensa": ["xt_", "_xt_", "xPortEnterCriticalTimeout"], + "heap": ["heap_", "multi_heap"], + "spi_flash": ["spi_flash"], + "rtc": ["rtc_", "rtcio_ll_"], + "gpio_driver": ["gpio_", "pins"], + "uart_driver": ["uart", "_uart", "UART"], + "timer": ["timer_", "esp_timer"], + "peripherals": ["periph_", "periman"], + "network_stack": [ + "vj_compress", + "raw_sendto", + "raw_input", + "etharp_", + "icmp_input", + "socket_ipv6", + "ip_napt", + "socket_ipv4_multicast", + "socket_ipv6_multicast", + "netconn_", + "recv_raw", + "accept_function", + "netconn_recv_data", + "netconn_accept", + "netconn_write_vectors_partly", + "netconn_drain", + "raw_connect", + "raw_bind", + "icmp_send_response", + "sockets", + "icmp_dest_unreach", + "inet_chksum_pseudo", + "alloc_socket", + "done_socket", + "set_global_fd_sets", + "inet_chksum_pbuf", + "tryget_socket_unconn_locked", + "tryget_socket_unconn", + "cs_create_ctrl_sock", + "netbuf_alloc", + ], + "ipv6_stack": ["nd6_", "ip6_", "mld6_", "icmp6_", "icmp6_input"], + "wifi_stack": [ + "ieee80211", + "hostap", + "sta_", + "ap_", + "scan_", + "wifi_", + "wpa_", + "wps_", + "esp_wifi", + "cnx_", + "wpa3_", + "sae_", + "wDev_", + "ic_", + "mac_", + "esf_buf", + "gWpaSm", + "sm_WPA", + "eapol_", + "owe_", + "wifiLowLevelInit", + "s_do_mapping", + "gScanStruct", + "ppSearchTxframe", + "ppMapWaitTxq", + "ppFillAMPDUBar", + "ppCheckTxConnTrafficIdle", + "ppCalTkipMic", + ], + "bluetooth": ["bt_", "ble_", "l2c_", "gatt_", "gap_", "hci_", "BT_init"], + "wifi_bt_coex": ["coex"], + "bluetooth_rom": ["r_ble", "r_lld", "r_llc", "r_llm"], + "bluedroid_bt": [ + "bluedroid", + "btc_", + "bta_", + "btm_", + "btu_", + "BTM_", + "GATT", + "L2CA_", + "smp_", + "gatts_", + "attp_", + "l2cu_", + "l2cb", + "smp_cb", + "BTA_GATTC_", + "SMP_", + "BTU_", + "BTA_Dm", + "GAP_Ble", + "BT_tx_if", + "host_recv_pkt_cb", + "saved_local_oob_data", + "string_to_bdaddr", + "string_is_bdaddr", + "CalConnectParamTimeout", + "transmit_fragment", + "transmit_data", + "event_command_ready", + "read_command_complete_header", + "parse_read_local_extended_features_response", + "parse_read_local_version_info_response", + "should_request_high", + "btdm_wakeup_request", + "BTA_SetAttributeValue", + 
"BTA_EnableBluetooth", + "transmit_command_futured", + "transmit_command", + "get_waiting_command", + "make_command", + "transmit_downward", + "host_recv_adv_packet", + "copy_extra_byte_in_db", + "parse_read_local_supported_commands_response", + ], + "crypto_math": [ + "ecp_", + "bignum_", + "mpi_", + "sswu", + "modp", + "dragonfly_", + "gcm_mult", + "__multiply", + "quorem", + "__mdiff", + "__lshift", + "__mprec_tens", + "ECC_", + "multiprecision_", + "mix_sub_columns", + "sbox", + "gfm2_sbox", + "gfm3_sbox", + "curve_p256", + "curve", + "p_256_init_curve", + "shift_sub_rows", + "rshift", + ], + "hw_crypto": ["esp_aes", "esp_sha", "esp_rsa", "esp_bignum", "esp_mpi"], + "libc": [ + "printf", + "scanf", + "malloc", + "free", + "memcpy", + "memset", + "strcpy", + "strlen", + "_dtoa", + "_fopen", + "__sfvwrite_r", + "qsort", + "__sf", + "__sflush_r", + "__srefill_r", + "_impure_data", + "_reclaim_reent", + "_open_r", + "strncpy", + "_strtod_l", + "__gethex", + "__hexnan", + "_setenv_r", + "_tzset_unlocked_r", + "__tzcalc_limits", + "select", + "scalbnf", + "strtof", + "strtof_l", + "__d2b", + "__b2d", + "__s2b", + "_Balloc", + "__multadd", + "__lo0bits", + "__atexit0", + "__smakebuf_r", + "__swhatbuf_r", + "_sungetc_r", + "_close_r", + "_link_r", + "_unsetenv_r", + "_rename_r", + "__month_lengths", + "tzinfo", + "__ratio", + "__hi0bits", + "__ulp", + "__any_on", + "__copybits", + "L_shift", + "_fcntl_r", + "_lseek_r", + "_read_r", + "_write_r", + "_unlink_r", + "_fstat_r", + "access", + "fsync", + "tcsetattr", + "tcgetattr", + "tcflush", + "tcdrain", + "__ssrefill_r", + "_stat_r", + "__hexdig_fun", + "__mcmp", + "_fwalk_sglue", + "__fpclassifyf", + "_setlocale_r", + "_mbrtowc_r", + "fcntl", + "__match", + "_lock_close", + "__c$", + "__func__$", + "__FUNCTION__$", + "DAYS_IN_MONTH", + "_DAYS_BEFORE_MONTH", + "CSWTCH$", + "dst$", + "sulp", + ], + "string_ops": ["strcmp", "strncmp", "strchr", "strstr", "strtok", "strdup"], + "memory_alloc": ["malloc", "calloc", "realloc", "free", "_sbrk"], + "file_io": [ + "fread", + "fwrite", + "fopen", + "fclose", + "fseek", + "ftell", + "fflush", + "s_fd_table", + ], + "string_formatting": [ + "snprintf", + "vsnprintf", + "sprintf", + "vsprintf", + "sscanf", + "vsscanf", + ], + "cpp_anonymous": ["_GLOBAL__N_", "n$"], + "cpp_runtime": ["__cxx", "_ZN", "_ZL", "_ZSt", "__gxx_personality", "_Z16"], + "exception_handling": ["__cxa_", "_Unwind_", "__gcc_personality", "uw_frame_state"], + "static_init": ["_GLOBAL__sub_I_"], + "mdns_lib": ["mdns"], + "phy_radio": [ + "phy_", + "rf_", + "chip_", + "register_chipv7", + "pbus_", + "bb_", + "fe_", + "rfcal_", + "ram_rfcal", + "tx_pwctrl", + "rx_chan", + "set_rx_gain", + "set_chan", + "agc_reg", + "ram_txiq", + "ram_txdc", + "ram_gen_rx_gain", + "rx_11b_opt", + "set_rx_sense", + "set_rx_gain_cal", + "set_chan_dig_gain", + "tx_pwctrl_init_cal", + "rfcal_txiq", + "set_tx_gain_table", + "correct_rfpll_offset", + "pll_correct_dcap", + "txiq_cal_init", + "pwdet_sar", + "pwdet_sar2_init", + "ram_iq_est_enable", + "ram_rfpll_set_freq", + "ant_wifirx_cfg", + "ant_btrx_cfg", + "force_txrxoff", + "force_txrx_off", + "tx_paon_set", + "opt_11b_resart", + "rfpll_1p2_opt", + "ram_dc_iq_est", + "ram_start_tx_tone", + "ram_en_pwdet", + "ram_cbw2040_cfg", + "rxdc_est_min", + "i2cmst_reg_init", + "temprature_sens_read", + "ram_restart_cal", + "ram_write_gain_mem", + "ram_wait_rfpll_cal_end", + "txcal_debuge_mode", + "ant_wifitx_cfg", + "reg_init_begin", + ], + "wifi_phy_pp": ["pp_", "ppT", "ppR", "ppP", "ppInstall", "ppCalTxAMPDULength"], 
+ "wifi_lmac": ["lmac"], + "wifi_device": ["wdev", "wDev_"], + "power_mgmt": [ + "pm_", + "sleep", + "rtc_sleep", + "light_sleep", + "deep_sleep", + "power_down", + "g_pm", + ], + "memory_mgmt": [ + "mem_", + "memory_", + "tlsf_", + "memp_", + "pbuf_", + "pbuf_alloc", + "pbuf_copy_partial_pbuf", + ], + "hal_layer": ["hal_"], + "clock_mgmt": [ + "clk_", + "clock_", + "rtc_clk", + "apb_", + "cpu_freq", + "setCpuFrequencyMhz", + ], + "cache_mgmt": ["cache"], + "flash_ops": ["flash", "image_load"], + "interrupt_handlers": [ + "isr", + "interrupt", + "intr_", + "exc_", + "exception", + "port_IntStack", + ], + "wrapper_functions": ["_wrapper"], + "error_handling": ["panic", "abort", "assert", "error_", "fault"], + "authentication": ["auth"], + "ppp_protocol": ["ppp", "ipcp_", "lcp_", "chap_", "LcpEchoCheck"], + "dhcp": ["dhcp", "handle_dhcp"], + "ethernet_phy": [ + "emac_", + "eth_phy_", + "phy_tlk110", + "phy_lan87", + "phy_ip101", + "phy_rtl", + "phy_dp83", + "phy_ksz", + "lan87xx_", + "rtl8201_", + "ip101_", + "ksz80xx_", + "jl1101_", + "dp83848_", + "eth_on_state_changed", + ], + "threading": ["pthread_", "thread_", "_task_"], + "pthread": ["pthread"], + "synchronization": ["mutex", "semaphore", "spinlock", "portMUX"], + "math_lib": [ + "sin", + "cos", + "tan", + "sqrt", + "pow", + "exp", + "log", + "atan", + "asin", + "acos", + "floor", + "ceil", + "fabs", + "round", + ], + "random": ["rand", "random", "rng_", "prng"], + "time_lib": [ + "time", + "clock", + "gettimeofday", + "settimeofday", + "localtime", + "gmtime", + "mktime", + "strftime", + ], + "console_io": ["console_", "uart_tx", "uart_rx", "puts", "putchar", "getchar"], + "rom_functions": ["r_", "rom_"], + "compiler_runtime": [ + "__divdi3", + "__udivdi3", + "__moddi3", + "__muldi3", + "__ashldi3", + "__ashrdi3", + "__lshrdi3", + "__cmpdi2", + "__fixdfdi", + "__floatdidf", + ], + "libgcc": ["libgcc", "_divdi3", "_udivdi3"], + "boot_startup": ["boot", "start_cpu", "call_start", "startup", "bootloader"], + "bootloader": ["bootloader_", "esp_bootloader"], + "app_framework": ["app_", "initArduino", "setup", "loop", "Update"], + "weak_symbols": ["__weak_"], + "compiler_builtins": ["__builtin_"], + "vfs": ["vfs_", "VFS"], + "esp32_sdk": ["esp32_", "esp32c", "esp32s"], + "usb": ["usb_", "USB", "cdc_", "CDC"], + "i2c_driver": ["i2c_", "I2C"], + "i2s_driver": ["i2s_", "I2S"], + "spi_driver": ["spi_", "SPI"], + "adc_driver": ["adc_", "ADC"], + "dac_driver": ["dac_", "DAC"], + "touch_driver": ["touch_", "TOUCH"], + "pwm_driver": ["pwm_", "PWM", "ledc_", "LEDC"], + "rmt_driver": ["rmt_", "RMT"], + "pcnt_driver": ["pcnt_", "PCNT"], + "can_driver": ["can_", "CAN", "twai_", "TWAI"], + "sdmmc_driver": ["sdmmc_", "SDMMC", "sdcard", "sd_card"], + "temp_sensor": ["temp_sensor", "tsens_"], + "watchdog": ["wdt_", "WDT", "watchdog"], + "brownout": ["brownout", "bod_"], + "ulp": ["ulp_", "ULP"], + "psram": ["psram", "PSRAM", "spiram", "SPIRAM"], + "efuse": ["efuse", "EFUSE"], + "partition": ["partition", "esp_partition"], + "esp_event": ["esp_event", "event_loop", "event_callback"], + "esp_console": ["esp_console", "console_"], + "chip_specific": ["chip_", "esp_chip"], + "esp_system_utils": ["esp_system", "esp_hw", "esp_clk", "esp_sleep"], + "ipc": ["esp_ipc", "ipc_"], + "wifi_config": [ + "g_cnxMgr", + "gChmCxt", + "g_ic", + "TxRxCxt", + "s_dp", + "s_ni", + "s_reg_dump", + "packet$", + "d_mult_table", + "K", + "fcstab", + ], + "smartconfig": ["sc_ack_send"], + "rc_calibration": ["rc_cal", "rcUpdate"], + "noise_floor": ["noise_check"], + "rf_calibration": 
[ + "set_rx_sense", + "set_rx_gain_cal", + "set_chan_dig_gain", + "tx_pwctrl_init_cal", + "rfcal_txiq", + "set_tx_gain_table", + "correct_rfpll_offset", + "pll_correct_dcap", + "txiq_cal_init", + "pwdet_sar", + "rx_11b_opt", + ], + "wifi_crypto": [ + "pk_use_ecparams", + "process_segments", + "ccmp_", + "rc4_", + "aria_", + "mgf_mask", + "dh_group", + "ccmp_aad_nonce", + "ccmp_encrypt", + "rc4_skip", + "aria_sb1", + "aria_sb2", + "aria_is1", + "aria_is2", + "aria_sl", + "aria_a", + ], + "radio_control": ["fsm_input", "fsm_sconfreq"], + "pbuf": [ + "pbuf_", + ], + "event_group": ["xEventGroup"], + "ringbuffer": ["xRingbuffer", "prvSend", "prvReceive", "prvCopy"], + "provisioning": ["prov_", "prov_stop_and_notify"], + "scan": ["gScanStruct"], + "port": ["xPort"], + "elf_loader": [ + "elf_add", + "elf_add_note", + "elf_add_segment", + "process_image", + "read_encoded", + "read_encoded_value", + "read_encoded_value_with_base", + "process_image_header", + ], + "socket_api": [ + "sockets", + "netconn_", + "accept_function", + "recv_raw", + "socket_ipv4_multicast", + "socket_ipv6_multicast", + ], + "igmp": ["igmp_", "igmp_send", "igmp_input"], + "icmp6": ["icmp6_"], + "arp": ["arp_table"], + "ampdu": [ + "ampdu_", + "rcAmpdu", + "trc_onAmpduOp", + "rcAmpduLowerRate", + "ampdu_dispatch_upto", + ], + "ieee802_11": ["ieee802_11_", "ieee802_11_parse_elems"], + "rate_control": ["rssi_margin", "rcGetSched", "get_rate_fcc_index"], + "nan": ["nan_dp_", "nan_dp_post_tx", "nan_dp_delete_peer"], + "channel_mgmt": ["chm_init", "chm_set_current_channel"], + "trace": ["trc_init", "trc_onAmpduOp"], + "country_code": ["country_info", "country_info_24ghz"], + "multicore": ["do_multicore_settings"], + "Update_lib": ["Update"], + "stdio": [ + "__sf", + "__sflush_r", + "__srefill_r", + "_impure_data", + "_reclaim_reent", + "_open_r", + ], + "strncpy_ops": ["strncpy"], + "math_internal": ["__mdiff", "__lshift", "__mprec_tens", "quorem"], + "character_class": ["__chclass"], + "camellia": ["camellia_", "camellia_feistel"], + "crypto_tables": ["FSb", "FSb2", "FSb3", "FSb4"], + "event_buffer": ["g_eb_list_desc", "eb_space"], + "base_node": ["base_node_", "base_node_add_handler"], + "file_descriptor": ["s_fd_table"], + "tx_delay": ["tx_delay_cfg"], + "deinit": ["deinit_functions"], + "lcp_echo": ["LcpEchoCheck"], + "raw_api": ["raw_bind", "raw_connect"], + "checksum": ["process_checksum"], + "entry_management": ["add_entry"], + "esp_ota": ["esp_ota", "ota_", "read_otadata"], + "http_server": [ + "httpd_", + "parse_url_char", + "cb_headers_complete", + "delete_entry", + "validate_structure", + "config_save", + "config_new", + "verify_url", + "cb_url", + ], + "misc_system": [ + "alarm_cbs", + "start_up", + "tokens", + "unhex", + "osi_funcs_ro", + "enum_function", + "fragment_and_dispatch", + "alarm_set", + "osi_alarm_new", + "config_set_string", + "config_update_newest_section", + "config_remove_key", + "method_strings", + "interop_match", + "interop_database", + "__state_table", + "__action_table", + "s_stub_table", + "s_context", + "s_mmu_ctx", + "s_get_bus_mask", + "hli_queue_put", + "list_remove", + "list_delete", + "lock_acquire_generic", + "is_vect_desc_usable", + "io_mode_str", + "__c$20233", + "interface", + "read_id_core", + "subscribe_idle", + "unsubscribe_idle", + "s_clkout_handle", + "lock_release_generic", + "config_set_int", + "config_get_int", + "config_get_string", + "config_has_key", + "config_remove_section", + "osi_alarm_init", + "osi_alarm_deinit", + "fixed_queue_enqueue", + "fixed_queue_dequeue", + 
"fixed_queue_new", + "fixed_pkt_queue_enqueue", + "fixed_pkt_queue_new", + "list_append", + "list_prepend", + "list_insert_after", + "list_contains", + "list_get_node", + "hash_function_blob", + "cb_no_body", + "cb_on_body", + "profile_tab", + "get_arg", + "trim", + "buf$", + "process_appended_hash_and_sig$constprop$0", + "uuidType", + "allocate_svc_db_buf", + "_hostname_is_ours", + "s_hli_handlers", + "tick_cb", + "idle_cb", + "input", + "entry_find", + "section_find", + "find_bucket_entry_", + "config_has_section", + "hli_queue_create", + "hli_queue_get", + "hli_c_handler", + "future_ready", + "future_await", + "future_new", + "pkt_queue_enqueue", + "pkt_queue_dequeue", + "pkt_queue_cleanup", + "pkt_queue_create", + "pkt_queue_destroy", + "fixed_pkt_queue_dequeue", + "osi_alarm_cancel", + "osi_alarm_is_active", + "osi_sem_take", + "osi_event_create", + "osi_event_bind", + "alarm_cb_handler", + "list_foreach", + "list_back", + "list_front", + "list_clear", + "fixed_queue_try_peek_first", + "translate_path", + "get_idx", + "find_key", + "init", + "end", + "start", + "set_read_value", + "copy_address_list", + "copy_and_key", + "sdk_cfg_opts", + "leftshift_onebit", + "config_section_end", + "config_section_begin", + "find_entry_and_check_all_reset", + "image_validate", + "xPendingReadyList", + "vListInitialise", + "lock_init_generic", + "ant_bttx_cfg", + "ant_dft_cfg", + "cs_send_to_ctrl_sock", + "config_llc_util_funcs_reset", + "make_set_adv_report_flow_control", + "make_set_event_mask", + "raw_new", + "raw_remove", + "BTE_InitStack", + "parse_read_local_supported_features_response", + "__math_invalidf", + "tinytens", + "__mprec_tinytens", + "__mprec_bigtens", + "vRingbufferDelete", + "vRingbufferDeleteWithCaps", + "vRingbufferReturnItem", + "vRingbufferReturnItemFromISR", + "get_acl_data_size_ble", + "get_features_ble", + "get_features_classic", + "get_acl_packet_size_ble", + "get_acl_packet_size_classic", + "supports_extended_inquiry_response", + "supports_rssi_with_inquiry_results", + "supports_interlaced_inquiry_scan", + "supports_reading_remote_extended_features", + ], + "bluetooth_ll": [ + "lld_pdu_", + "ld_acl_", + "lld_stop_ind_handler", + "lld_evt_winsize_change", + "config_lld_evt_funcs_reset", + "config_lld_funcs_reset", + "config_llm_funcs_reset", + "llm_set_long_adv_data", + "lld_retry_tx_prog", + "llc_link_sup_to_ind_handler", + "config_llc_funcs_reset", + "lld_evt_rxwin_compute", + "config_btdm_funcs_reset", + "config_ea_funcs_reset", + "llc_defalut_state_tab_reset", + "config_rwip_funcs_reset", + "ke_lmp_rx_flooding_detect", + ], +} + +# Demangled patterns: patterns found in demangled C++ names +DEMANGLED_PATTERNS = { + "gpio_driver": ["GPIO"], + "uart_driver": ["UART"], + "network_stack": [ + "lwip", + "tcp", + "udp", + "ip4", + "ip6", + "dhcp", + "dns", + "netif", + "ethernet", + "ppp", + "slip", + ], + "wifi_stack": ["NetworkInterface"], + "nimble_bt": [ + "nimble", + "NimBLE", + "ble_hs", + "ble_gap", + "ble_gatt", + "ble_att", + "ble_l2cap", + "ble_sm", + ], + "crypto": ["mbedtls", "crypto", "sha", "aes", "rsa", "ecc", "tls", "ssl"], + "cpp_stdlib": ["std::", "__gnu_cxx::", "__cxxabiv"], + "static_init": ["__static_initialization"], + "rtti": ["__type_info", "__class_type_info"], + "web_server_lib": ["AsyncWebServer", "AsyncWebHandler", "WebServer"], + "async_tcp": ["AsyncClient", "AsyncServer"], + "mdns_lib": ["mdns"], + "json_lib": [ + "ArduinoJson", + "JsonDocument", + "JsonArray", + "JsonObject", + "deserialize", + "serialize", + ], + "http_lib": ["HTTP", "http_", 
"Request", "Response", "Uri", "WebSocket"], + "logging": ["log", "Log", "print", "Print", "diag_"], + "authentication": ["checkDigestAuthentication"], + "libgcc": ["libgcc"], + "esp_system": ["esp_", "ESP"], + "arduino": ["arduino"], + "nvs": ["nvs_", "_ZTVN3nvs", "nvs::"], + "filesystem": ["spiffs", "vfs"], + "libc": ["newlib"], +} diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 0b3bf87590..0f65e4fbbd 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -18,7 +18,7 @@ import sys sys.path.insert(0, str(Path(__file__).parent.parent)) # pylint: disable=wrong-import-position -from esphome.analyze_memory import MemoryAnalyzer +from esphome.analyze_memory import MemoryAnalyzer # noqa: E402 # Comment marker to identify our memory impact comments COMMENT_MARKER = "" From 6d2c700c438e63fc6e2f8dc0da69ceb9790fdec3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 13:43:05 -1000 Subject: [PATCH 021/336] relo --- esphome/analyze_memory/__init__.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index c6fdb1028d..b85b1d5765 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -1,6 +1,7 @@ """Memory usage analyzer for ESPHome compiled binaries.""" from collections import defaultdict +from functools import cache import json import logging from pathlib import Path @@ -13,6 +14,7 @@ _LOGGER = logging.getLogger(__name__) # Get the list of actual ESPHome components by scanning the components directory +@cache def get_esphome_components(): """Get set of actual ESPHome components from the components directory.""" components = set() @@ -34,10 +36,6 @@ def get_esphome_components(): return components -# Cache the component list -ESPHOME_COMPONENTS = get_esphome_components() - - class MemorySection: """Represents a memory section with its symbols.""" @@ -285,7 +283,7 @@ class MemoryAnalyzer: if "esphome::" in demangled: # Check for special component classes that include component name in the class # For example: esphome::ESPHomeOTAComponent -> ota component - for component_name in ESPHOME_COMPONENTS: + for component_name in get_esphome_components(): # Check various naming patterns component_upper = component_name.upper() component_camel = component_name.replace("_", "").title() @@ -307,7 +305,7 @@ class MemoryAnalyzer: component_name = component_name.rstrip("_") # Check if this is an actual component in the components directory - if component_name in ESPHOME_COMPONENTS: + if component_name in get_esphome_components(): return f"[esphome]{component_name}" # Check if this is a known external component from the config if component_name in self.external_components: From 256d3b119b907e4551dcfeb0e01122facc61fd3a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 13:44:30 -1000 Subject: [PATCH 022/336] relo --- esphome/analyze_memory/__init__.py | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index b85b1d5765..050bc011a8 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -1,6 +1,7 @@ """Memory usage analyzer for ESPHome compiled binaries.""" from collections import defaultdict +from dataclasses import dataclass, field from functools import cache import json import logging @@ -36,32 +37,36 @@ def get_esphome_components(): return components +@dataclass class MemorySection: """Represents a memory section with its symbols.""" - def __init__(self, name: str): - self.name = name - self.symbols: list[tuple[str, int, str]] = [] # (symbol_name, size, component) - self.total_size = 0 + name: str + symbols: list[tuple[str, int, str]] = field( + default_factory=list + ) # (symbol_name, size, component) + total_size: int = 0 +@dataclass class ComponentMemory: """Tracks memory usage for a component.""" - def __init__(self, name: str): - self.name = name - self.text_size = 0 # Code in flash - self.rodata_size = 0 # Read-only data in flash - self.data_size = 0 # Initialized data (flash + ram) - self.bss_size = 0 # Uninitialized data (ram only) - self.symbol_count = 0 + name: str + text_size: int = 0 # Code in flash + rodata_size: int = 0 # Read-only data in flash + data_size: int = 0 # Initialized data (flash + ram) + bss_size: int = 0 # Uninitialized data (ram only) + symbol_count: int = 0 @property def flash_total(self) -> int: + """Total flash usage (text + rodata + data).""" return self.text_size + self.rodata_size + self.data_size @property def ram_total(self) -> int: + """Total RAM usage (data + bss).""" return self.data_size + self.bss_size From 5049c7227d6cb2935c767fab1697c5494f9d5947 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 13:50:15 -1000 Subject: [PATCH 023/336] reduce --- esphome/analyze_memory/__init__.py | 69 ++++++++++++------------------ esphome/analyze_memory/const.py | 28 ++++++++++++ 2 files changed, 56 insertions(+), 41 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 050bc011a8..63002d848d 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -9,7 +9,12 @@ from pathlib import Path import re import subprocess -from .const import DEMANGLED_PATTERNS, ESPHOME_COMPONENT_PATTERN, SYMBOL_PATTERNS +from .const import ( + CORE_SUBCATEGORY_PATTERNS, + DEMANGLED_PATTERNS, + ESPHOME_COMPONENT_PATTERN, + SYMBOL_PATTERNS, +) _LOGGER = logging.getLogger(__name__) @@ -37,6 +42,26 @@ def get_esphome_components(): return components +@cache +def get_component_class_patterns(component_name: str) -> list[str]: + """Generate component class name patterns for symbol matching. 
+ + Args: + component_name: The component name (e.g., "ota", "wifi", "api") + + Returns: + List of pattern strings to match against demangled symbols + """ + component_upper = component_name.upper() + component_camel = component_name.replace("_", "").title() + return [ + f"esphome::{component_upper}Component", # e.g., esphome::OTAComponent + f"esphome::ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent + f"esphome::{component_camel}Component", # e.g., esphome::OtaComponent + f"esphome::ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent + ] + + @dataclass class MemorySection: """Represents a memory section with its symbols.""" @@ -289,16 +314,7 @@ class MemoryAnalyzer: # Check for special component classes that include component name in the class # For example: esphome::ESPHomeOTAComponent -> ota component for component_name in get_esphome_components(): - # Check various naming patterns - component_upper = component_name.upper() - component_camel = component_name.replace("_", "").title() - patterns = [ - f"esphome::{component_upper}Component", # e.g., esphome::OTAComponent - f"esphome::ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent - f"esphome::{component_camel}Component", # e.g., esphome::OtaComponent - f"esphome::ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent - ] - + patterns = get_component_class_patterns(component_name) if any(pattern in demangled for pattern in patterns): return f"[esphome]{component_name}" @@ -394,35 +410,6 @@ class MemoryAnalyzer: def _categorize_esphome_core_symbol(self, demangled: str) -> str: """Categorize ESPHome core symbols into subcategories.""" - # Dictionary of patterns for core subcategories - CORE_SUBCATEGORY_PATTERNS = { - "Component Framework": ["Component"], - "Application Core": ["Application"], - "Scheduler": ["Scheduler"], - "Logging": ["Logger", "log_"], - "Preferences": ["preferences", "Preferences"], - "Synchronization": ["Mutex", "Lock"], - "Helpers": ["Helper"], - "Network Utilities": ["network", "Network"], - "Time Management": ["time", "Time"], - "String Utilities": ["str_", "string"], - "Parsing/Formatting": ["parse_", "format_"], - "Optional Types": ["optional", "Optional"], - "Callbacks": ["Callback", "callback"], - "Color Utilities": ["Color"], - "C++ Operators": ["operator"], - "Global Variables": ["global_", "_GLOBAL"], - "Setup/Loop": ["setup", "loop"], - "System Control": ["reboot", "restart"], - "GPIO Management": ["GPIO", "gpio"], - "Interrupt Handling": ["ISR", "interrupt"], - "Hooks": ["Hook", "hook"], - "Entity Base Classes": ["Entity"], - "Automation Framework": ["automation", "Automation"], - "Automation Components": ["Condition", "Action", "Trigger"], - "Lambda Support": ["lambda"], - } - # Special patterns that need to be checked separately if any(pattern in demangled for pattern in ["vtable", "typeinfo", "thunk"]): return "C++ Runtime (vtables/RTTI)" @@ -430,7 +417,7 @@ class MemoryAnalyzer: if demangled.startswith("std::"): return "C++ STL" - # Check against patterns + # Check against patterns from const.py for category, patterns in CORE_SUBCATEGORY_PATTERNS.items(): if any(pattern in demangled for pattern in patterns): return category diff --git a/esphome/analyze_memory/const.py b/esphome/analyze_memory/const.py index 68cd957090..df37c0b2cd 100644 --- a/esphome/analyze_memory/const.py +++ b/esphome/analyze_memory/const.py @@ -855,3 +855,31 @@ DEMANGLED_PATTERNS = { "filesystem": ["spiffs", "vfs"], "libc": ["newlib"], } + +# 
Patterns for categorizing ESPHome core symbols into subcategories +CORE_SUBCATEGORY_PATTERNS = { + "Component Framework": ["Component"], + "Application Core": ["Application"], + "Scheduler": ["Scheduler"], + "Component Iterator": ["ComponentIterator"], + "Helper Functions": ["Helpers", "helpers"], + "Preferences/Storage": ["Preferences", "ESPPreferences"], + "I/O Utilities": ["HighFrequencyLoopRequester"], + "String Utilities": ["str_"], + "Bit Utilities": ["reverse_bits"], + "Data Conversion": ["convert_"], + "Network Utilities": ["network", "IPAddress"], + "API Protocol": ["api::"], + "WiFi Manager": ["wifi::"], + "MQTT Client": ["mqtt::"], + "Logger": ["logger::"], + "OTA Updates": ["ota::"], + "Web Server": ["web_server::"], + "Time Management": ["time::"], + "Sensor Framework": ["sensor::"], + "Binary Sensor": ["binary_sensor::"], + "Switch Framework": ["switch_::"], + "Light Framework": ["light::"], + "Climate Framework": ["climate::"], + "Cover Framework": ["cover::"], +} From 43c62297e84d91b581a22f34d9f1b3196cac7ebd Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 13:56:31 -1000 Subject: [PATCH 024/336] merge --- esphome/analyze_memory/__init__.py | 327 +--------------------------- esphome/analyze_memory/__main__.py | 6 + esphome/analyze_memory/cli.py | 338 +++++++++++++++++++++++++++++ 3 files changed, 350 insertions(+), 321 deletions(-) create mode 100644 esphome/analyze_memory/__main__.py create mode 100644 esphome/analyze_memory/cli.py diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 63002d848d..9c35965b74 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -174,7 +174,7 @@ class MemoryAnalyzer: self.sections[mapped_section].total_size += size except subprocess.CalledProcessError as e: - _LOGGER.error(f"Failed to parse sections: {e}") + _LOGGER.error("Failed to parse sections: %s", e) raise def _parse_symbols(self) -> None: @@ -252,7 +252,7 @@ class MemoryAnalyzer: seen_addresses.add(address) except subprocess.CalledProcessError as e: - _LOGGER.error(f"Failed to parse symbols: {e}") + _LOGGER.error("Failed to parse symbols: %s", e) raise def _categorize_symbols(self) -> None: @@ -399,8 +399,9 @@ class MemoryAnalyzer: # If batch fails, cache originals for symbol in symbols: self._demangle_cache[symbol] = symbol - except Exception: + except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e: # On error, cache originals + _LOGGER.debug("Failed to batch demangle symbols: %s", e) for symbol in symbols: self._demangle_cache[symbol] = symbol @@ -424,267 +425,6 @@ class MemoryAnalyzer: return "Other Core" - def generate_report(self, detailed: bool = False) -> str: - """Generate a formatted memory report.""" - components = sorted( - self.components.items(), key=lambda x: x[1].flash_total, reverse=True - ) - - # Calculate totals - total_flash = sum(c.flash_total for _, c in components) - total_ram = sum(c.ram_total for _, c in components) - - # Build report - lines = [] - - # Column width constants - COL_COMPONENT = 29 - COL_FLASH_TEXT = 14 - COL_FLASH_DATA = 14 - COL_RAM_DATA = 12 - COL_RAM_BSS = 12 - COL_TOTAL_FLASH = 15 - COL_TOTAL_RAM = 12 - COL_SEPARATOR = 3 # " | " - - # Core analysis column widths - COL_CORE_SUBCATEGORY = 30 - COL_CORE_SIZE = 12 - COL_CORE_COUNT = 6 - COL_CORE_PERCENT = 10 - - # Calculate the exact table width - table_width = ( - COL_COMPONENT - + COL_SEPARATOR - + COL_FLASH_TEXT - + COL_SEPARATOR - + COL_FLASH_DATA - + COL_SEPARATOR - + COL_RAM_DATA 
- + COL_SEPARATOR - + COL_RAM_BSS - + COL_SEPARATOR - + COL_TOTAL_FLASH - + COL_SEPARATOR - + COL_TOTAL_RAM - ) - - lines.append("=" * table_width) - lines.append("Component Memory Analysis".center(table_width)) - lines.append("=" * table_width) - lines.append("") - - # Main table - fixed column widths - lines.append( - f"{'Component':<{COL_COMPONENT}} | {'Flash (text)':>{COL_FLASH_TEXT}} | {'Flash (data)':>{COL_FLASH_DATA}} | {'RAM (data)':>{COL_RAM_DATA}} | {'RAM (bss)':>{COL_RAM_BSS}} | {'Total Flash':>{COL_TOTAL_FLASH}} | {'Total RAM':>{COL_TOTAL_RAM}}" - ) - lines.append( - "-" * COL_COMPONENT - + "-+-" - + "-" * COL_FLASH_TEXT - + "-+-" - + "-" * COL_FLASH_DATA - + "-+-" - + "-" * COL_RAM_DATA - + "-+-" - + "-" * COL_RAM_BSS - + "-+-" - + "-" * COL_TOTAL_FLASH - + "-+-" - + "-" * COL_TOTAL_RAM - ) - - for name, mem in components: - if mem.flash_total > 0 or mem.ram_total > 0: - flash_rodata = mem.rodata_size + mem.data_size - lines.append( - f"{name:<{COL_COMPONENT}} | {mem.text_size:>{COL_FLASH_TEXT - 2},} B | {flash_rodata:>{COL_FLASH_DATA - 2},} B | " - f"{mem.data_size:>{COL_RAM_DATA - 2},} B | {mem.bss_size:>{COL_RAM_BSS - 2},} B | " - f"{mem.flash_total:>{COL_TOTAL_FLASH - 2},} B | {mem.ram_total:>{COL_TOTAL_RAM - 2},} B" - ) - - lines.append( - "-" * COL_COMPONENT - + "-+-" - + "-" * COL_FLASH_TEXT - + "-+-" - + "-" * COL_FLASH_DATA - + "-+-" - + "-" * COL_RAM_DATA - + "-+-" - + "-" * COL_RAM_BSS - + "-+-" - + "-" * COL_TOTAL_FLASH - + "-+-" - + "-" * COL_TOTAL_RAM - ) - lines.append( - f"{'TOTAL':<{COL_COMPONENT}} | {' ':>{COL_FLASH_TEXT}} | {' ':>{COL_FLASH_DATA}} | " - f"{' ':>{COL_RAM_DATA}} | {' ':>{COL_RAM_BSS}} | " - f"{total_flash:>{COL_TOTAL_FLASH - 2},} B | {total_ram:>{COL_TOTAL_RAM - 2},} B" - ) - - # Top consumers - lines.append("") - lines.append("Top Flash Consumers:") - for i, (name, mem) in enumerate(components[:25]): - if mem.flash_total > 0: - percentage = ( - (mem.flash_total / total_flash * 100) if total_flash > 0 else 0 - ) - lines.append( - f"{i + 1}. {name} ({mem.flash_total:,} B) - {percentage:.1f}% of analyzed flash" - ) - - lines.append("") - lines.append("Top RAM Consumers:") - ram_components = sorted(components, key=lambda x: x[1].ram_total, reverse=True) - for i, (name, mem) in enumerate(ram_components[:25]): - if mem.ram_total > 0: - percentage = (mem.ram_total / total_ram * 100) if total_ram > 0 else 0 - lines.append( - f"{i + 1}. {name} ({mem.ram_total:,} B) - {percentage:.1f}% of analyzed RAM" - ) - - lines.append("") - lines.append( - "Note: This analysis covers symbols in the ELF file. Some runtime allocations may not be included." 
- ) - lines.append("=" * table_width) - - # Add ESPHome core detailed analysis if there are core symbols - if self._esphome_core_symbols: - lines.append("") - lines.append("=" * table_width) - lines.append("[esphome]core Detailed Analysis".center(table_width)) - lines.append("=" * table_width) - lines.append("") - - # Group core symbols by subcategory - core_subcategories: dict[str, list[tuple[str, str, int]]] = defaultdict( - list - ) - - for symbol, demangled, size in self._esphome_core_symbols: - # Categorize based on demangled name patterns - subcategory = self._categorize_esphome_core_symbol(demangled) - core_subcategories[subcategory].append((symbol, demangled, size)) - - # Sort subcategories by total size - sorted_subcategories = sorted( - [ - (name, symbols, sum(s[2] for s in symbols)) - for name, symbols in core_subcategories.items() - ], - key=lambda x: x[2], - reverse=True, - ) - - lines.append( - f"{'Subcategory':<{COL_CORE_SUBCATEGORY}} | {'Size':>{COL_CORE_SIZE}} | " - f"{'Count':>{COL_CORE_COUNT}} | {'% of Core':>{COL_CORE_PERCENT}}" - ) - lines.append( - "-" * COL_CORE_SUBCATEGORY - + "-+-" - + "-" * COL_CORE_SIZE - + "-+-" - + "-" * COL_CORE_COUNT - + "-+-" - + "-" * COL_CORE_PERCENT - ) - - core_total = sum(size for _, _, size in self._esphome_core_symbols) - - for subcategory, symbols, total_size in sorted_subcategories: - percentage = (total_size / core_total * 100) if core_total > 0 else 0 - lines.append( - f"{subcategory:<{COL_CORE_SUBCATEGORY}} | {total_size:>{COL_CORE_SIZE - 2},} B | " - f"{len(symbols):>{COL_CORE_COUNT}} | {percentage:>{COL_CORE_PERCENT - 1}.1f}%" - ) - - # Top 10 largest core symbols - lines.append("") - lines.append("Top 10 Largest [esphome]core Symbols:") - sorted_core_symbols = sorted( - self._esphome_core_symbols, key=lambda x: x[2], reverse=True - ) - - for i, (symbol, demangled, size) in enumerate(sorted_core_symbols[:15]): - lines.append(f"{i + 1}. 
{demangled} ({size:,} B)") - - lines.append("=" * table_width) - - # Add detailed analysis for top ESPHome and external components - esphome_components = [ - (name, mem) - for name, mem in components - if name.startswith("[esphome]") and name != "[esphome]core" - ] - external_components = [ - (name, mem) for name, mem in components if name.startswith("[external]") - ] - - top_esphome_components = sorted( - esphome_components, key=lambda x: x[1].flash_total, reverse=True - )[:30] - - # Include all external components (they're usually important) - top_external_components = sorted( - external_components, key=lambda x: x[1].flash_total, reverse=True - ) - - # Check if API component exists and ensure it's included - api_component = None - for name, mem in components: - if name == "[esphome]api": - api_component = (name, mem) - break - - # Combine all components to analyze: top ESPHome + all external + API if not already included - components_to_analyze = list(top_esphome_components) + list( - top_external_components - ) - if api_component and api_component not in components_to_analyze: - components_to_analyze.append(api_component) - - if components_to_analyze: - for comp_name, comp_mem in components_to_analyze: - comp_symbols = self._component_symbols.get(comp_name, []) - if comp_symbols: - lines.append("") - lines.append("=" * table_width) - lines.append(f"{comp_name} Detailed Analysis".center(table_width)) - lines.append("=" * table_width) - lines.append("") - - # Sort symbols by size - sorted_symbols = sorted( - comp_symbols, key=lambda x: x[2], reverse=True - ) - - lines.append(f"Total symbols: {len(sorted_symbols)}") - lines.append(f"Total size: {comp_mem.flash_total:,} B") - lines.append("") - - # Show all symbols > 100 bytes for better visibility - large_symbols = [ - (sym, dem, size) - for sym, dem, size in sorted_symbols - if size > 100 - ] - - lines.append( - f"{comp_name} Symbols > 100 B ({len(large_symbols)} symbols):" - ) - for i, (symbol, demangled, size) in enumerate(large_symbols): - lines.append(f"{i + 1}. {demangled} ({size:,} B)") - - lines.append("=" * table_width) - - return "\n".join(lines) - def to_json(self) -> str: """Export analysis results as JSON.""" data = { @@ -707,63 +447,8 @@ class MemoryAnalyzer: } return json.dumps(data, indent=2) - def dump_uncategorized_symbols(self, output_file: str | None = None) -> None: - """Dump uncategorized symbols for analysis.""" - # Sort by size descending - sorted_symbols = sorted( - self._uncategorized_symbols, key=lambda x: x[2], reverse=True - ) - - lines = ["Uncategorized Symbols Analysis", "=" * 80] - lines.append(f"Total uncategorized symbols: {len(sorted_symbols)}") - lines.append( - f"Total uncategorized size: {sum(s[2] for s in sorted_symbols):,} bytes" - ) - lines.append("") - lines.append(f"{'Size':>10} | {'Symbol':<60} | Demangled") - lines.append("-" * 10 + "-+-" + "-" * 60 + "-+-" + "-" * 40) - - for symbol, demangled, size in sorted_symbols[:100]: # Top 100 - if symbol != demangled: - lines.append(f"{size:>10,} | {symbol[:60]:<60} | {demangled[:100]}") - else: - lines.append(f"{size:>10,} | {symbol[:60]:<60} | [not demangled]") - - if len(sorted_symbols) > 100: - lines.append(f"\n... 
and {len(sorted_symbols) - 100} more symbols") - - content = "\n".join(lines) - - if output_file: - with open(output_file, "w") as f: - f.write(content) - else: - print(content) - - -def analyze_elf( - elf_path: str, - objdump_path: str | None = None, - readelf_path: str | None = None, - detailed: bool = False, - external_components: set[str] | None = None, -) -> str: - """Analyze an ELF file and return a memory report.""" - analyzer = MemoryAnalyzer(elf_path, objdump_path, readelf_path, external_components) - analyzer.analyze() - return analyzer.generate_report(detailed) - if __name__ == "__main__": - import sys + from .cli import main - if len(sys.argv) < 2: - print("Usage: analyze_memory.py ") - sys.exit(1) - - try: - report = analyze_elf(sys.argv[1]) - print(report) - except Exception as e: - print(f"Error: {e}") - sys.exit(1) + main() diff --git a/esphome/analyze_memory/__main__.py b/esphome/analyze_memory/__main__.py new file mode 100644 index 0000000000..aa772c3ad4 --- /dev/null +++ b/esphome/analyze_memory/__main__.py @@ -0,0 +1,6 @@ +"""Main entry point for running the memory analyzer as a module.""" + +from .cli import main + +if __name__ == "__main__": + main() diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py new file mode 100644 index 0000000000..ffce04bb6e --- /dev/null +++ b/esphome/analyze_memory/cli.py @@ -0,0 +1,338 @@ +"""CLI interface for memory analysis with report generation.""" + +from collections import defaultdict +import subprocess +import sys + +from . import MemoryAnalyzer + + +class MemoryAnalyzerCLI(MemoryAnalyzer): + """Memory analyzer with CLI-specific report generation.""" + + def generate_report(self, detailed: bool = False) -> str: + """Generate a formatted memory report.""" + components = sorted( + self.components.items(), key=lambda x: x[1].flash_total, reverse=True + ) + + # Calculate totals + total_flash = sum(c.flash_total for _, c in components) + total_ram = sum(c.ram_total for _, c in components) + + # Build report + lines = [] + + # Column width constants + COL_COMPONENT = 29 + COL_FLASH_TEXT = 14 + COL_FLASH_DATA = 14 + COL_RAM_DATA = 12 + COL_RAM_BSS = 12 + COL_TOTAL_FLASH = 15 + COL_TOTAL_RAM = 12 + COL_SEPARATOR = 3 # " | " + + # Core analysis column widths + COL_CORE_SUBCATEGORY = 30 + COL_CORE_SIZE = 12 + COL_CORE_COUNT = 6 + COL_CORE_PERCENT = 10 + + # Calculate the exact table width + table_width = ( + COL_COMPONENT + + COL_SEPARATOR + + COL_FLASH_TEXT + + COL_SEPARATOR + + COL_FLASH_DATA + + COL_SEPARATOR + + COL_RAM_DATA + + COL_SEPARATOR + + COL_RAM_BSS + + COL_SEPARATOR + + COL_TOTAL_FLASH + + COL_SEPARATOR + + COL_TOTAL_RAM + ) + + lines.append("=" * table_width) + lines.append("Component Memory Analysis".center(table_width)) + lines.append("=" * table_width) + lines.append("") + + # Main table - fixed column widths + lines.append( + f"{'Component':<{COL_COMPONENT}} | {'Flash (text)':>{COL_FLASH_TEXT}} | {'Flash (data)':>{COL_FLASH_DATA}} | {'RAM (data)':>{COL_RAM_DATA}} | {'RAM (bss)':>{COL_RAM_BSS}} | {'Total Flash':>{COL_TOTAL_FLASH}} | {'Total RAM':>{COL_TOTAL_RAM}}" + ) + lines.append( + "-" * COL_COMPONENT + + "-+-" + + "-" * COL_FLASH_TEXT + + "-+-" + + "-" * COL_FLASH_DATA + + "-+-" + + "-" * COL_RAM_DATA + + "-+-" + + "-" * COL_RAM_BSS + + "-+-" + + "-" * COL_TOTAL_FLASH + + "-+-" + + "-" * COL_TOTAL_RAM + ) + + for name, mem in components: + if mem.flash_total > 0 or mem.ram_total > 0: + flash_rodata = mem.rodata_size + mem.data_size + lines.append( + f"{name:<{COL_COMPONENT}} | 
{mem.text_size:>{COL_FLASH_TEXT - 2},} B | {flash_rodata:>{COL_FLASH_DATA - 2},} B | " + f"{mem.data_size:>{COL_RAM_DATA - 2},} B | {mem.bss_size:>{COL_RAM_BSS - 2},} B | " + f"{mem.flash_total:>{COL_TOTAL_FLASH - 2},} B | {mem.ram_total:>{COL_TOTAL_RAM - 2},} B" + ) + + lines.append( + "-" * COL_COMPONENT + + "-+-" + + "-" * COL_FLASH_TEXT + + "-+-" + + "-" * COL_FLASH_DATA + + "-+-" + + "-" * COL_RAM_DATA + + "-+-" + + "-" * COL_RAM_BSS + + "-+-" + + "-" * COL_TOTAL_FLASH + + "-+-" + + "-" * COL_TOTAL_RAM + ) + lines.append( + f"{'TOTAL':<{COL_COMPONENT}} | {' ':>{COL_FLASH_TEXT}} | {' ':>{COL_FLASH_DATA}} | " + f"{' ':>{COL_RAM_DATA}} | {' ':>{COL_RAM_BSS}} | " + f"{total_flash:>{COL_TOTAL_FLASH - 2},} B | {total_ram:>{COL_TOTAL_RAM - 2},} B" + ) + + # Top consumers + lines.append("") + lines.append("Top Flash Consumers:") + for i, (name, mem) in enumerate(components[:25]): + if mem.flash_total > 0: + percentage = ( + (mem.flash_total / total_flash * 100) if total_flash > 0 else 0 + ) + lines.append( + f"{i + 1}. {name} ({mem.flash_total:,} B) - {percentage:.1f}% of analyzed flash" + ) + + lines.append("") + lines.append("Top RAM Consumers:") + ram_components = sorted(components, key=lambda x: x[1].ram_total, reverse=True) + for i, (name, mem) in enumerate(ram_components[:25]): + if mem.ram_total > 0: + percentage = (mem.ram_total / total_ram * 100) if total_ram > 0 else 0 + lines.append( + f"{i + 1}. {name} ({mem.ram_total:,} B) - {percentage:.1f}% of analyzed RAM" + ) + + lines.append("") + lines.append( + "Note: This analysis covers symbols in the ELF file. Some runtime allocations may not be included." + ) + lines.append("=" * table_width) + + # Add ESPHome core detailed analysis if there are core symbols + if self._esphome_core_symbols: + lines.append("") + lines.append("=" * table_width) + lines.append("[esphome]core Detailed Analysis".center(table_width)) + lines.append("=" * table_width) + lines.append("") + + # Group core symbols by subcategory + core_subcategories: dict[str, list[tuple[str, str, int]]] = defaultdict( + list + ) + + for symbol, demangled, size in self._esphome_core_symbols: + # Categorize based on demangled name patterns + subcategory = self._categorize_esphome_core_symbol(demangled) + core_subcategories[subcategory].append((symbol, demangled, size)) + + # Sort subcategories by total size + sorted_subcategories = sorted( + [ + (name, symbols, sum(s[2] for s in symbols)) + for name, symbols in core_subcategories.items() + ], + key=lambda x: x[2], + reverse=True, + ) + + lines.append( + f"{'Subcategory':<{COL_CORE_SUBCATEGORY}} | {'Size':>{COL_CORE_SIZE}} | " + f"{'Count':>{COL_CORE_COUNT}} | {'% of Core':>{COL_CORE_PERCENT}}" + ) + lines.append( + "-" * COL_CORE_SUBCATEGORY + + "-+-" + + "-" * COL_CORE_SIZE + + "-+-" + + "-" * COL_CORE_COUNT + + "-+-" + + "-" * COL_CORE_PERCENT + ) + + core_total = sum(size for _, _, size in self._esphome_core_symbols) + + for subcategory, symbols, total_size in sorted_subcategories: + percentage = (total_size / core_total * 100) if core_total > 0 else 0 + lines.append( + f"{subcategory:<{COL_CORE_SUBCATEGORY}} | {total_size:>{COL_CORE_SIZE - 2},} B | " + f"{len(symbols):>{COL_CORE_COUNT}} | {percentage:>{COL_CORE_PERCENT - 1}.1f}%" + ) + + # Top 10 largest core symbols + lines.append("") + lines.append("Top 10 Largest [esphome]core Symbols:") + sorted_core_symbols = sorted( + self._esphome_core_symbols, key=lambda x: x[2], reverse=True + ) + + for i, (symbol, demangled, size) in enumerate(sorted_core_symbols[:15]): + 
lines.append(f"{i + 1}. {demangled} ({size:,} B)") + + lines.append("=" * table_width) + + # Add detailed analysis for top ESPHome and external components + esphome_components = [ + (name, mem) + for name, mem in components + if name.startswith("[esphome]") and name != "[esphome]core" + ] + external_components = [ + (name, mem) for name, mem in components if name.startswith("[external]") + ] + + top_esphome_components = sorted( + esphome_components, key=lambda x: x[1].flash_total, reverse=True + )[:30] + + # Include all external components (they're usually important) + top_external_components = sorted( + external_components, key=lambda x: x[1].flash_total, reverse=True + ) + + # Check if API component exists and ensure it's included + api_component = None + for name, mem in components: + if name == "[esphome]api": + api_component = (name, mem) + break + + # Combine all components to analyze: top ESPHome + all external + API if not already included + components_to_analyze = list(top_esphome_components) + list( + top_external_components + ) + if api_component and api_component not in components_to_analyze: + components_to_analyze.append(api_component) + + if components_to_analyze: + for comp_name, comp_mem in components_to_analyze: + comp_symbols = self._component_symbols.get(comp_name, []) + if comp_symbols: + lines.append("") + lines.append("=" * table_width) + lines.append(f"{comp_name} Detailed Analysis".center(table_width)) + lines.append("=" * table_width) + lines.append("") + + # Sort symbols by size + sorted_symbols = sorted( + comp_symbols, key=lambda x: x[2], reverse=True + ) + + lines.append(f"Total symbols: {len(sorted_symbols)}") + lines.append(f"Total size: {comp_mem.flash_total:,} B") + lines.append("") + + # Show all symbols > 100 bytes for better visibility + large_symbols = [ + (sym, dem, size) + for sym, dem, size in sorted_symbols + if size > 100 + ] + + lines.append( + f"{comp_name} Symbols > 100 B ({len(large_symbols)} symbols):" + ) + for i, (symbol, demangled, size) in enumerate(large_symbols): + lines.append(f"{i + 1}. {demangled} ({size:,} B)") + + lines.append("=" * table_width) + + return "\n".join(lines) + + def dump_uncategorized_symbols(self, output_file: str | None = None) -> None: + """Dump uncategorized symbols for analysis.""" + # Sort by size descending + sorted_symbols = sorted( + self._uncategorized_symbols, key=lambda x: x[2], reverse=True + ) + + lines = ["Uncategorized Symbols Analysis", "=" * 80] + lines.append(f"Total uncategorized symbols: {len(sorted_symbols)}") + lines.append( + f"Total uncategorized size: {sum(s[2] for s in sorted_symbols):,} bytes" + ) + lines.append("") + lines.append(f"{'Size':>10} | {'Symbol':<60} | Demangled") + lines.append("-" * 10 + "-+-" + "-" * 60 + "-+-" + "-" * 40) + + for symbol, demangled, size in sorted_symbols[:100]: # Top 100 + if symbol != demangled: + lines.append(f"{size:>10,} | {symbol[:60]:<60} | {demangled[:100]}") + else: + lines.append(f"{size:>10,} | {symbol[:60]:<60} | [not demangled]") + + if len(sorted_symbols) > 100: + lines.append(f"\n... 
and {len(sorted_symbols) - 100} more symbols") + + content = "\n".join(lines) + + if output_file: + with open(output_file, "w", encoding="utf-8") as f: + f.write(content) + else: + print(content) + + +def analyze_elf( + elf_path: str, + objdump_path: str | None = None, + readelf_path: str | None = None, + detailed: bool = False, + external_components: set[str] | None = None, +) -> str: + """Analyze an ELF file and return a memory report.""" + analyzer = MemoryAnalyzerCLI( + elf_path, objdump_path, readelf_path, external_components + ) + analyzer.analyze() + return analyzer.generate_report(detailed) + + +def main(): + """CLI entrypoint for memory analysis.""" + if len(sys.argv) < 2: + print("Usage: analyze_memory.py ") + sys.exit(1) + + try: + report = analyze_elf(sys.argv[1]) + print(report) + except (subprocess.CalledProcessError, FileNotFoundError, OSError) as e: + print(f"Error: {e}") + sys.exit(1) + + +if __name__ == "__main__": + main() From 7879df4dd19036928a0a67ad05416182300dbdb4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 13:57:57 -1000 Subject: [PATCH 025/336] merge --- esphome/analyze_memory/__init__.py | 28 ++++++++++------------------ esphome/analyze_memory/const.py | 9 +++++++++ 2 files changed, 19 insertions(+), 18 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 9c35965b74..8cacc1b513 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -13,6 +13,7 @@ from .const import ( CORE_SUBCATEGORY_PATTERNS, DEMANGLED_PATTERNS, ESPHOME_COMPONENT_PATTERN, + SECTION_MAPPING, SYMBOL_PATTERNS, ) @@ -23,23 +24,21 @@ _LOGGER = logging.getLogger(__name__) @cache def get_esphome_components(): """Get set of actual ESPHome components from the components directory.""" - components = set() - # Find the components directory relative to this file # Go up two levels from analyze_memory/__init__.py to esphome/ current_dir = Path(__file__).parent.parent components_dir = current_dir / "components" - if components_dir.exists() and components_dir.is_dir(): - for item in components_dir.iterdir(): - if ( - item.is_dir() - and not item.name.startswith(".") - and not item.name.startswith("__") - ): - components.add(item.name) + if not components_dir.exists() or not components_dir.is_dir(): + return frozenset() - return components + return frozenset( + item.name + for item in components_dir.iterdir() + if item.is_dir() + and not item.name.startswith(".") + and not item.name.startswith("__") + ) @cache @@ -179,13 +178,6 @@ class MemoryAnalyzer: def _parse_symbols(self) -> None: """Parse symbols from ELF file.""" - # Section mapping - centralizes the logic - SECTION_MAPPING = { - ".text": [".text", ".iram"], - ".rodata": [".rodata"], - ".data": [".data", ".dram"], - ".bss": [".bss"], - } def map_section_name(raw_section: str) -> str | None: """Map raw section name to standard section.""" diff --git a/esphome/analyze_memory/const.py b/esphome/analyze_memory/const.py index df37c0b2cd..8543c6ec2b 100644 --- a/esphome/analyze_memory/const.py +++ b/esphome/analyze_memory/const.py @@ -5,6 +5,15 @@ import re # Pattern to extract ESPHome component namespaces dynamically ESPHOME_COMPONENT_PATTERN = re.compile(r"esphome::([a-zA-Z0-9_]+)::") +# Section mapping for ELF file sections +# Maps standard section names to their various platform-specific variants +SECTION_MAPPING = { + ".text": frozenset([".text", ".iram"]), + ".rodata": frozenset([".rodata"]), + ".data": frozenset([".data", ".dram"]), + 
".bss": frozenset([".bss"]), +} + # Component identification rules # Symbol patterns: patterns found in raw symbol names SYMBOL_PATTERNS = { From a78a7dfa4e835a084adac775f443b0b75a8f704c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 13:58:59 -1000 Subject: [PATCH 026/336] merge --- esphome/analyze_memory/__init__.py | 37 +++++++++++++++--------------- 1 file changed, 18 insertions(+), 19 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 8cacc1b513..6d70232448 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -20,6 +20,21 @@ from .const import ( _LOGGER = logging.getLogger(__name__) +def _map_section_name(raw_section: str) -> str | None: + """Map raw section name to standard section. + + Args: + raw_section: Raw section name from ELF file (e.g., ".iram0.text", ".rodata.str1.1") + + Returns: + Standard section name (".text", ".rodata", ".data", ".bss") or None + """ + for standard_section, patterns in SECTION_MAPPING.items(): + if any(pattern in raw_section for pattern in patterns): + return standard_section + return None + + # Get the list of actual ESPHome components by scanning the components directory @cache def get_esphome_components(): @@ -154,17 +169,8 @@ class MemoryAnalyzer: size_hex = match.group(2) size = int(size_hex, 16) - # Map various section names to standard categories - mapped_section = None - if ".text" in section_name or ".iram" in section_name: - mapped_section = ".text" - elif ".rodata" in section_name: - mapped_section = ".rodata" - elif ".data" in section_name and "bss" not in section_name: - mapped_section = ".data" - elif ".bss" in section_name: - mapped_section = ".bss" - + # Map to standard section name + mapped_section = _map_section_name(section_name) if mapped_section: if mapped_section not in self.sections: self.sections[mapped_section] = MemorySection( @@ -179,13 +185,6 @@ class MemoryAnalyzer: def _parse_symbols(self) -> None: """Parse symbols from ELF file.""" - def map_section_name(raw_section: str) -> str | None: - """Map raw section name to standard section.""" - for standard_section, patterns in SECTION_MAPPING.items(): - if any(pattern in raw_section for pattern in patterns): - return standard_section - return None - def parse_symbol_line(line: str) -> tuple[str, str, int, str] | None: """Parse a single symbol line from objdump output. @@ -211,7 +210,7 @@ class MemoryAnalyzer: # Find section, size, and name for i, part in enumerate(parts): if part.startswith("."): - section = map_section_name(part) + section = _map_section_name(part) if section and i + 1 < len(parts): try: size = int(parts[i + 1], 16) From a5d6e39b2f8c834c1e82098b753ddb8c4df1a56c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 14:01:07 -1000 Subject: [PATCH 027/336] merge --- esphome/analyze_memory/__init__.py | 75 ++++++------------------------ 1 file changed, 13 insertions(+), 62 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 6d70232448..11e5b64f7d 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -13,28 +13,13 @@ from .const import ( CORE_SUBCATEGORY_PATTERNS, DEMANGLED_PATTERNS, ESPHOME_COMPONENT_PATTERN, - SECTION_MAPPING, SYMBOL_PATTERNS, ) +from .helpers import map_section_name, parse_symbol_line _LOGGER = logging.getLogger(__name__) -def _map_section_name(raw_section: str) -> str | None: - """Map raw section name to standard section. 
- - Args: - raw_section: Raw section name from ELF file (e.g., ".iram0.text", ".rodata.str1.1") - - Returns: - Standard section name (".text", ".rodata", ".data", ".bss") or None - """ - for standard_section, patterns in SECTION_MAPPING.items(): - if any(pattern in raw_section for pattern in patterns): - return standard_section - return None - - # Get the list of actual ESPHome components by scanning the components directory @cache def get_esphome_components(): @@ -170,7 +155,7 @@ class MemoryAnalyzer: size = int(size_hex, 16) # Map to standard section name - mapped_section = _map_section_name(section_name) + mapped_section = map_section_name(section_name) if mapped_section: if mapped_section not in self.sections: self.sections[mapped_section] = MemorySection( @@ -184,44 +169,6 @@ class MemoryAnalyzer: def _parse_symbols(self) -> None: """Parse symbols from ELF file.""" - - def parse_symbol_line(line: str) -> tuple[str, str, int, str] | None: - """Parse a single symbol line from objdump output. - - Returns (section, name, size, address) or None if not a valid symbol. - Format: address l/g w/d F/O section size name - Example: 40084870 l F .iram0.text 00000000 _xt_user_exc - """ - parts = line.split() - if len(parts) < 5: - return None - - try: - # Validate and extract address - address = parts[0] - int(address, 16) - except ValueError: - return None - - # Look for F (function) or O (object) flag - if "F" not in parts and "O" not in parts: - return None - - # Find section, size, and name - for i, part in enumerate(parts): - if part.startswith("."): - section = _map_section_name(part) - if section and i + 1 < len(parts): - try: - size = int(parts[i + 1], 16) - if i + 2 < len(parts) and size > 0: - name = " ".join(parts[i + 2 :]) - return (section, name, size, address) - except ValueError: - pass - break - return None - try: result = subprocess.run( [self.objdump_path, "-t", str(self.elf_path)], @@ -234,13 +181,17 @@ class MemoryAnalyzer: seen_addresses: set[str] = set() for line in result.stdout.splitlines(): - symbol_info = parse_symbol_line(line) - if symbol_info: - section, name, size, address = symbol_info - # Skip duplicate symbols at the same address (e.g., C1/C2 constructors) - if address not in seen_addresses and section in self.sections: - self.sections[section].symbols.append((name, size, "")) - seen_addresses.add(address) + if not (symbol_info := parse_symbol_line(line)): + continue + + section, name, size, address = symbol_info + + # Skip duplicate symbols at the same address (e.g., C1/C2 constructors) + if address in seen_addresses or section not in self.sections: + continue + + self.sections[section].symbols.append((name, size, "")) + seen_addresses.add(address) except subprocess.CalledProcessError as e: _LOGGER.error("Failed to parse symbols: %s", e) From 79aafe2cd51dc31877b800d8ea989518e304b1cd Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 14:01:21 -1000 Subject: [PATCH 028/336] merge --- esphome/analyze_memory/helpers.py | 72 +++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) create mode 100644 esphome/analyze_memory/helpers.py diff --git a/esphome/analyze_memory/helpers.py b/esphome/analyze_memory/helpers.py new file mode 100644 index 0000000000..c529aad52a --- /dev/null +++ b/esphome/analyze_memory/helpers.py @@ -0,0 +1,72 @@ +"""Helper functions for memory analysis.""" + +from .const import SECTION_MAPPING + + +def map_section_name(raw_section: str) -> str | None: + """Map raw section name to standard section. 
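+
+    For example, ".iram0.text" or ".text.setup" (illustrative names) map to
+    ".text", while a section matching no SECTION_MAPPING pattern yields None.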
+ + Args: + raw_section: Raw section name from ELF file (e.g., ".iram0.text", ".rodata.str1.1") + + Returns: + Standard section name (".text", ".rodata", ".data", ".bss") or None + """ + for standard_section, patterns in SECTION_MAPPING.items(): + if any(pattern in raw_section for pattern in patterns): + return standard_section + return None + + +def parse_symbol_line(line: str) -> tuple[str, str, int, str] | None: + """Parse a single symbol line from objdump output. + + Args: + line: Line from objdump -t output + + Returns: + Tuple of (section, name, size, address) or None if not a valid symbol. + Format: address l/g w/d F/O section size name + Example: 40084870 l F .iram0.text 00000000 _xt_user_exc + """ + parts = line.split() + if len(parts) < 5: + return None + + try: + # Validate and extract address + address = parts[0] + int(address, 16) + except ValueError: + return None + + # Look for F (function) or O (object) flag + if "F" not in parts and "O" not in parts: + return None + + # Find section, size, and name + for i, part in enumerate(parts): + if not part.startswith("."): + continue + + section = map_section_name(part) + if not section: + break + + # Need at least size field after section + if i + 1 >= len(parts): + break + + try: + size = int(parts[i + 1], 16) + except ValueError: + break + + # Need symbol name and non-zero size + if i + 2 >= len(parts) or size == 0: + break + + name = " ".join(parts[i + 2 :]) + return (section, name, size, address) + + return None From 86c12079b415fb9b8784fc601023c72f65c4eaf5 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 14:05:24 -1000 Subject: [PATCH 029/336] merge --- esphome/analyze_memory/__init__.py | 12 +- esphome/analyze_memory/cli.py | 186 ++++++++++++++--------------- esphome/analyze_memory/const.py | 9 ++ 3 files changed, 104 insertions(+), 103 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 11e5b64f7d..2a3955144c 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -13,6 +13,7 @@ from .const import ( CORE_SUBCATEGORY_PATTERNS, DEMANGLED_PATTERNS, ESPHOME_COMPONENT_PATTERN, + SECTION_TO_ATTR, SYMBOL_PATTERNS, ) from .helpers import map_section_name, parse_symbol_line @@ -219,14 +220,9 @@ class MemoryAnalyzer: comp_mem = self.components[component] comp_mem.symbol_count += 1 - if section_name == ".text": - comp_mem.text_size += size - elif section_name == ".rodata": - comp_mem.rodata_size += size - elif section_name == ".data": - comp_mem.data_size += size - elif section_name == ".bss": - comp_mem.bss_size += size + # Update the appropriate size attribute based on section + if attr_name := SECTION_TO_ATTR.get(section_name): + setattr(comp_mem, attr_name, getattr(comp_mem, attr_name) + size) # Track uncategorized symbols if component == "other" and size > 0: diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index ffce04bb6e..07d0a9320e 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -10,6 +10,69 @@ from . 
import MemoryAnalyzer class MemoryAnalyzerCLI(MemoryAnalyzer): """Memory analyzer with CLI-specific report generation.""" + # Column width constants + COL_COMPONENT: int = 29 + COL_FLASH_TEXT: int = 14 + COL_FLASH_DATA: int = 14 + COL_RAM_DATA: int = 12 + COL_RAM_BSS: int = 12 + COL_TOTAL_FLASH: int = 15 + COL_TOTAL_RAM: int = 12 + COL_SEPARATOR: int = 3 # " | " + + # Core analysis column widths + COL_CORE_SUBCATEGORY: int = 30 + COL_CORE_SIZE: int = 12 + COL_CORE_COUNT: int = 6 + COL_CORE_PERCENT: int = 10 + + # Calculate table width once at class level + TABLE_WIDTH: int = ( + COL_COMPONENT + + COL_SEPARATOR + + COL_FLASH_TEXT + + COL_SEPARATOR + + COL_FLASH_DATA + + COL_SEPARATOR + + COL_RAM_DATA + + COL_SEPARATOR + + COL_RAM_BSS + + COL_SEPARATOR + + COL_TOTAL_FLASH + + COL_SEPARATOR + + COL_TOTAL_RAM + ) + + @staticmethod + def _make_separator_line(*widths: int) -> str: + """Create a separator line with given column widths. + + Args: + widths: Column widths to create separators for + + Returns: + Separator line like "----+---------+-----" + """ + return "-+-".join("-" * width for width in widths) + + # Pre-computed separator lines + MAIN_TABLE_SEPARATOR: str = _make_separator_line( + COL_COMPONENT, + COL_FLASH_TEXT, + COL_FLASH_DATA, + COL_RAM_DATA, + COL_RAM_BSS, + COL_TOTAL_FLASH, + COL_TOTAL_RAM, + ) + + CORE_TABLE_SEPARATOR: str = _make_separator_line( + COL_CORE_SUBCATEGORY, + COL_CORE_SIZE, + COL_CORE_COUNT, + COL_CORE_PERCENT, + ) + def generate_report(self, detailed: bool = False) -> str: """Generate a formatted memory report.""" components = sorted( @@ -23,92 +86,31 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): # Build report lines = [] - # Column width constants - COL_COMPONENT = 29 - COL_FLASH_TEXT = 14 - COL_FLASH_DATA = 14 - COL_RAM_DATA = 12 - COL_RAM_BSS = 12 - COL_TOTAL_FLASH = 15 - COL_TOTAL_RAM = 12 - COL_SEPARATOR = 3 # " | " - - # Core analysis column widths - COL_CORE_SUBCATEGORY = 30 - COL_CORE_SIZE = 12 - COL_CORE_COUNT = 6 - COL_CORE_PERCENT = 10 - - # Calculate the exact table width - table_width = ( - COL_COMPONENT - + COL_SEPARATOR - + COL_FLASH_TEXT - + COL_SEPARATOR - + COL_FLASH_DATA - + COL_SEPARATOR - + COL_RAM_DATA - + COL_SEPARATOR - + COL_RAM_BSS - + COL_SEPARATOR - + COL_TOTAL_FLASH - + COL_SEPARATOR - + COL_TOTAL_RAM - ) - - lines.append("=" * table_width) - lines.append("Component Memory Analysis".center(table_width)) - lines.append("=" * table_width) + lines.append("=" * self.TABLE_WIDTH) + lines.append("Component Memory Analysis".center(self.TABLE_WIDTH)) + lines.append("=" * self.TABLE_WIDTH) lines.append("") # Main table - fixed column widths lines.append( - f"{'Component':<{COL_COMPONENT}} | {'Flash (text)':>{COL_FLASH_TEXT}} | {'Flash (data)':>{COL_FLASH_DATA}} | {'RAM (data)':>{COL_RAM_DATA}} | {'RAM (bss)':>{COL_RAM_BSS}} | {'Total Flash':>{COL_TOTAL_FLASH}} | {'Total RAM':>{COL_TOTAL_RAM}}" - ) - lines.append( - "-" * COL_COMPONENT - + "-+-" - + "-" * COL_FLASH_TEXT - + "-+-" - + "-" * COL_FLASH_DATA - + "-+-" - + "-" * COL_RAM_DATA - + "-+-" - + "-" * COL_RAM_BSS - + "-+-" - + "-" * COL_TOTAL_FLASH - + "-+-" - + "-" * COL_TOTAL_RAM + f"{'Component':<{self.COL_COMPONENT}} | {'Flash (text)':>{self.COL_FLASH_TEXT}} | {'Flash (data)':>{self.COL_FLASH_DATA}} | {'RAM (data)':>{self.COL_RAM_DATA}} | {'RAM (bss)':>{self.COL_RAM_BSS}} | {'Total Flash':>{self.COL_TOTAL_FLASH}} | {'Total RAM':>{self.COL_TOTAL_RAM}}" ) + lines.append(self.MAIN_TABLE_SEPARATOR) for name, mem in components: if mem.flash_total > 0 or mem.ram_total > 0: flash_rodata = 
mem.rodata_size + mem.data_size lines.append( - f"{name:<{COL_COMPONENT}} | {mem.text_size:>{COL_FLASH_TEXT - 2},} B | {flash_rodata:>{COL_FLASH_DATA - 2},} B | " - f"{mem.data_size:>{COL_RAM_DATA - 2},} B | {mem.bss_size:>{COL_RAM_BSS - 2},} B | " - f"{mem.flash_total:>{COL_TOTAL_FLASH - 2},} B | {mem.ram_total:>{COL_TOTAL_RAM - 2},} B" + f"{name:<{self.COL_COMPONENT}} | {mem.text_size:>{self.COL_FLASH_TEXT - 2},} B | {flash_rodata:>{self.COL_FLASH_DATA - 2},} B | " + f"{mem.data_size:>{self.COL_RAM_DATA - 2},} B | {mem.bss_size:>{self.COL_RAM_BSS - 2},} B | " + f"{mem.flash_total:>{self.COL_TOTAL_FLASH - 2},} B | {mem.ram_total:>{self.COL_TOTAL_RAM - 2},} B" ) + lines.append(self.MAIN_TABLE_SEPARATOR) lines.append( - "-" * COL_COMPONENT - + "-+-" - + "-" * COL_FLASH_TEXT - + "-+-" - + "-" * COL_FLASH_DATA - + "-+-" - + "-" * COL_RAM_DATA - + "-+-" - + "-" * COL_RAM_BSS - + "-+-" - + "-" * COL_TOTAL_FLASH - + "-+-" - + "-" * COL_TOTAL_RAM - ) - lines.append( - f"{'TOTAL':<{COL_COMPONENT}} | {' ':>{COL_FLASH_TEXT}} | {' ':>{COL_FLASH_DATA}} | " - f"{' ':>{COL_RAM_DATA}} | {' ':>{COL_RAM_BSS}} | " - f"{total_flash:>{COL_TOTAL_FLASH - 2},} B | {total_ram:>{COL_TOTAL_RAM - 2},} B" + f"{'TOTAL':<{self.COL_COMPONENT}} | {' ':>{self.COL_FLASH_TEXT}} | {' ':>{self.COL_FLASH_DATA}} | " + f"{' ':>{self.COL_RAM_DATA}} | {' ':>{self.COL_RAM_BSS}} | " + f"{total_flash:>{self.COL_TOTAL_FLASH - 2},} B | {total_ram:>{self.COL_TOTAL_RAM - 2},} B" ) # Top consumers @@ -137,14 +139,14 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): lines.append( "Note: This analysis covers symbols in the ELF file. Some runtime allocations may not be included." ) - lines.append("=" * table_width) + lines.append("=" * self.TABLE_WIDTH) # Add ESPHome core detailed analysis if there are core symbols if self._esphome_core_symbols: lines.append("") - lines.append("=" * table_width) - lines.append("[esphome]core Detailed Analysis".center(table_width)) - lines.append("=" * table_width) + lines.append("=" * self.TABLE_WIDTH) + lines.append("[esphome]core Detailed Analysis".center(self.TABLE_WIDTH)) + lines.append("=" * self.TABLE_WIDTH) lines.append("") # Group core symbols by subcategory @@ -168,26 +170,18 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): ) lines.append( - f"{'Subcategory':<{COL_CORE_SUBCATEGORY}} | {'Size':>{COL_CORE_SIZE}} | " - f"{'Count':>{COL_CORE_COUNT}} | {'% of Core':>{COL_CORE_PERCENT}}" - ) - lines.append( - "-" * COL_CORE_SUBCATEGORY - + "-+-" - + "-" * COL_CORE_SIZE - + "-+-" - + "-" * COL_CORE_COUNT - + "-+-" - + "-" * COL_CORE_PERCENT + f"{'Subcategory':<{self.COL_CORE_SUBCATEGORY}} | {'Size':>{self.COL_CORE_SIZE}} | " + f"{'Count':>{self.COL_CORE_COUNT}} | {'% of Core':>{self.COL_CORE_PERCENT}}" ) + lines.append(self.CORE_TABLE_SEPARATOR) core_total = sum(size for _, _, size in self._esphome_core_symbols) for subcategory, symbols, total_size in sorted_subcategories: percentage = (total_size / core_total * 100) if core_total > 0 else 0 lines.append( - f"{subcategory:<{COL_CORE_SUBCATEGORY}} | {total_size:>{COL_CORE_SIZE - 2},} B | " - f"{len(symbols):>{COL_CORE_COUNT}} | {percentage:>{COL_CORE_PERCENT - 1}.1f}%" + f"{subcategory:<{self.COL_CORE_SUBCATEGORY}} | {total_size:>{self.COL_CORE_SIZE - 2},} B | " + f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%" ) # Top 10 largest core symbols @@ -200,7 +194,7 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): for i, (symbol, demangled, size) in enumerate(sorted_core_symbols[:15]): lines.append(f"{i + 1}. 
{demangled} ({size:,} B)") - lines.append("=" * table_width) + lines.append("=" * self.TABLE_WIDTH) # Add detailed analysis for top ESPHome and external components esphome_components = [ @@ -240,9 +234,11 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): comp_symbols = self._component_symbols.get(comp_name, []) if comp_symbols: lines.append("") - lines.append("=" * table_width) - lines.append(f"{comp_name} Detailed Analysis".center(table_width)) - lines.append("=" * table_width) + lines.append("=" * self.TABLE_WIDTH) + lines.append( + f"{comp_name} Detailed Analysis".center(self.TABLE_WIDTH) + ) + lines.append("=" * self.TABLE_WIDTH) lines.append("") # Sort symbols by size @@ -267,7 +263,7 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): for i, (symbol, demangled, size) in enumerate(large_symbols): lines.append(f"{i + 1}. {demangled} ({size:,} B)") - lines.append("=" * table_width) + lines.append("=" * self.TABLE_WIDTH) return "\n".join(lines) diff --git a/esphome/analyze_memory/const.py b/esphome/analyze_memory/const.py index 8543c6ec2b..c60b70aeec 100644 --- a/esphome/analyze_memory/const.py +++ b/esphome/analyze_memory/const.py @@ -14,6 +14,15 @@ SECTION_MAPPING = { ".bss": frozenset([".bss"]), } +# Section to ComponentMemory attribute mapping +# Maps section names to the attribute name in ComponentMemory dataclass +SECTION_TO_ATTR = { + ".text": "text_size", + ".rodata": "rodata_size", + ".data": "data_size", + ".bss": "bss_size", +} + # Component identification rules # Symbol patterns: patterns found in raw symbol names SYMBOL_PATTERNS = { From 25fe4a1476b00d6d42588c66f29d52c7f078d33d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 14:09:08 -1000 Subject: [PATCH 030/336] merge --- esphome/analyze_memory/__init__.py | 93 ++++++++++-------------------- esphome/analyze_memory/cli.py | 58 +++++++++---------- esphome/analyze_memory/helpers.py | 44 ++++++++++++++ 3 files changed, 100 insertions(+), 95 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 2a3955144c..b76cb4ec3f 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -2,7 +2,6 @@ from collections import defaultdict from dataclasses import dataclass, field -from functools import cache import json import logging from pathlib import Path @@ -16,52 +15,16 @@ from .const import ( SECTION_TO_ATTR, SYMBOL_PATTERNS, ) -from .helpers import map_section_name, parse_symbol_line +from .helpers import ( + get_component_class_patterns, + get_esphome_components, + map_section_name, + parse_symbol_line, +) _LOGGER = logging.getLogger(__name__) -# Get the list of actual ESPHome components by scanning the components directory -@cache -def get_esphome_components(): - """Get set of actual ESPHome components from the components directory.""" - # Find the components directory relative to this file - # Go up two levels from analyze_memory/__init__.py to esphome/ - current_dir = Path(__file__).parent.parent - components_dir = current_dir / "components" - - if not components_dir.exists() or not components_dir.is_dir(): - return frozenset() - - return frozenset( - item.name - for item in components_dir.iterdir() - if item.is_dir() - and not item.name.startswith(".") - and not item.name.startswith("__") - ) - - -@cache -def get_component_class_patterns(component_name: str) -> list[str]: - """Generate component class name patterns for symbol matching. 
- - Args: - component_name: The component name (e.g., "ota", "wifi", "api") - - Returns: - List of pattern strings to match against demangled symbols - """ - component_upper = component_name.upper() - component_camel = component_name.replace("_", "").title() - return [ - f"esphome::{component_upper}Component", # e.g., esphome::OTAComponent - f"esphome::ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent - f"esphome::{component_camel}Component", # e.g., esphome::OtaComponent - f"esphome::ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent - ] - - @dataclass class MemorySection: """Represents a memory section with its symbols.""" @@ -146,23 +109,26 @@ class MemoryAnalyzer: # Parse section headers for line in result.stdout.splitlines(): # Look for section entries - match = re.match( - r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)", - line, - ) - if match: - section_name = match.group(1) - size_hex = match.group(2) - size = int(size_hex, 16) + if not ( + match := re.match( + r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)", + line, + ) + ): + continue - # Map to standard section name - mapped_section = map_section_name(section_name) - if mapped_section: - if mapped_section not in self.sections: - self.sections[mapped_section] = MemorySection( - mapped_section - ) - self.sections[mapped_section].total_size += size + section_name = match.group(1) + size_hex = match.group(2) + size = int(size_hex, 16) + + # Map to standard section name + mapped_section = map_section_name(section_name) + if not mapped_section: + continue + + if mapped_section not in self.sections: + self.sections[mapped_section] = MemorySection(mapped_section) + self.sections[mapped_section].total_size += size except subprocess.CalledProcessError as e: _LOGGER.error("Failed to parse sections: %s", e) @@ -201,10 +167,11 @@ class MemoryAnalyzer: def _categorize_symbols(self) -> None: """Categorize symbols by component.""" # First, collect all unique symbol names for batch demangling - all_symbols = set() - for section in self.sections.values(): - for symbol_name, _, _ in section.symbols: - all_symbols.add(symbol_name) + all_symbols = { + symbol_name + for section in self.sections.values() + for symbol_name, _, _ in section.symbols + } # Batch demangle all symbols at once self._batch_demangle_symbols(list(all_symbols)) diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index 07d0a9320e..b79a5b6d55 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -231,39 +231,33 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): if components_to_analyze: for comp_name, comp_mem in components_to_analyze: - comp_symbols = self._component_symbols.get(comp_name, []) - if comp_symbols: - lines.append("") - lines.append("=" * self.TABLE_WIDTH) - lines.append( - f"{comp_name} Detailed Analysis".center(self.TABLE_WIDTH) - ) - lines.append("=" * self.TABLE_WIDTH) - lines.append("") + if not (comp_symbols := self._component_symbols.get(comp_name, [])): + continue + lines.append("") + lines.append("=" * self.TABLE_WIDTH) + lines.append(f"{comp_name} Detailed Analysis".center(self.TABLE_WIDTH)) + lines.append("=" * self.TABLE_WIDTH) + lines.append("") - # Sort symbols by size - sorted_symbols = sorted( - comp_symbols, key=lambda x: x[2], reverse=True - ) + # Sort symbols by size + sorted_symbols = sorted(comp_symbols, key=lambda x: x[2], reverse=True) - lines.append(f"Total symbols: {len(sorted_symbols)}") - 
lines.append(f"Total size: {comp_mem.flash_total:,} B") - lines.append("") + lines.append(f"Total symbols: {len(sorted_symbols)}") + lines.append(f"Total size: {comp_mem.flash_total:,} B") + lines.append("") - # Show all symbols > 100 bytes for better visibility - large_symbols = [ - (sym, dem, size) - for sym, dem, size in sorted_symbols - if size > 100 - ] + # Show all symbols > 100 bytes for better visibility + large_symbols = [ + (sym, dem, size) for sym, dem, size in sorted_symbols if size > 100 + ] - lines.append( - f"{comp_name} Symbols > 100 B ({len(large_symbols)} symbols):" - ) - for i, (symbol, demangled, size) in enumerate(large_symbols): - lines.append(f"{i + 1}. {demangled} ({size:,} B)") + lines.append( + f"{comp_name} Symbols > 100 B ({len(large_symbols)} symbols):" + ) + for i, (symbol, demangled, size) in enumerate(large_symbols): + lines.append(f"{i + 1}. {demangled} ({size:,} B)") - lines.append("=" * self.TABLE_WIDTH) + lines.append("=" * self.TABLE_WIDTH) return "\n".join(lines) @@ -284,10 +278,10 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): lines.append("-" * 10 + "-+-" + "-" * 60 + "-+-" + "-" * 40) for symbol, demangled, size in sorted_symbols[:100]: # Top 100 - if symbol != demangled: - lines.append(f"{size:>10,} | {symbol[:60]:<60} | {demangled[:100]}") - else: - lines.append(f"{size:>10,} | {symbol[:60]:<60} | [not demangled]") + demangled_display = ( + demangled[:100] if symbol != demangled else "[not demangled]" + ) + lines.append(f"{size:>10,} | {symbol[:60]:<60} | {demangled_display}") if len(sorted_symbols) > 100: lines.append(f"\n... and {len(sorted_symbols) - 100} more symbols") diff --git a/esphome/analyze_memory/helpers.py b/esphome/analyze_memory/helpers.py index c529aad52a..1b5a1c67c2 100644 --- a/esphome/analyze_memory/helpers.py +++ b/esphome/analyze_memory/helpers.py @@ -1,8 +1,52 @@ """Helper functions for memory analysis.""" +from functools import cache +from pathlib import Path + from .const import SECTION_MAPPING +# Get the list of actual ESPHome components by scanning the components directory +@cache +def get_esphome_components(): + """Get set of actual ESPHome components from the components directory.""" + # Find the components directory relative to this file + # Go up two levels from analyze_memory/helpers.py to esphome/ + current_dir = Path(__file__).parent.parent + components_dir = current_dir / "components" + + if not components_dir.exists() or not components_dir.is_dir(): + return frozenset() + + return frozenset( + item.name + for item in components_dir.iterdir() + if item.is_dir() + and not item.name.startswith(".") + and not item.name.startswith("__") + ) + + +@cache +def get_component_class_patterns(component_name: str) -> list[str]: + """Generate component class name patterns for symbol matching. + + Args: + component_name: The component name (e.g., "ota", "wifi", "api") + + Returns: + List of pattern strings to match against demangled symbols + """ + component_upper = component_name.upper() + component_camel = component_name.replace("_", "").title() + return [ + f"esphome::{component_upper}Component", # e.g., esphome::OTAComponent + f"esphome::ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent + f"esphome::{component_camel}Component", # e.g., esphome::OtaComponent + f"esphome::ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent + ] + + def map_section_name(raw_section: str) -> str | None: """Map raw section name to standard section. 
From 2c86ebaf7ff2fa01b817069568540b2c62b4e03f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 14:10:23 -1000 Subject: [PATCH 031/336] merge --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6fa8150b93..22ae046246 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -676,13 +676,13 @@ jobs: python-version: ${{ env.DEFAULT_PYTHON }} cache-key: ${{ needs.common.outputs.cache-key }} - name: Download target ELF artifact - uses: actions/download-artifact@1a18f44933c290e06e7167a92071e78bb20ab94a # v4.4.2 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: memory-impact-target-elf path: ./elf-artifacts/target continue-on-error: true - name: Download PR ELF artifact - uses: actions/download-artifact@1a18f44933c290e06e7167a92071e78bb20ab94a # v4.4.2 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: name: memory-impact-pr-elf path: ./elf-artifacts/pr From 843f590db47ba1cf349c9a21f106af64b4486cbf Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 14:13:25 -1000 Subject: [PATCH 032/336] fix --- esphome/analyze_memory/cli.py | 34 +++++++++++++++++++++++++++++++--- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index b79a5b6d55..675e93ae07 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -313,14 +313,42 @@ def analyze_elf( def main(): """CLI entrypoint for memory analysis.""" if len(sys.argv) < 2: - print("Usage: analyze_memory.py ") + print( + "Usage: python -m esphome.analyze_memory [objdump_path] [readelf_path]" + ) + print("\nExample for ESP8266:") + print(" python -m esphome.analyze_memory firmware.elf \\") + print( + " ~/.platformio/packages/toolchain-xtensa/bin/xtensa-lx106-elf-objdump \\" + ) + print( + " ~/.platformio/packages/toolchain-xtensa/bin/xtensa-lx106-elf-readelf" + ) + print("\nExample for ESP32:") + print(" python -m esphome.analyze_memory firmware.elf \\") + print( + " ~/.platformio/packages/toolchain-xtensa-esp-elf/bin/xtensa-esp32-elf-objdump \\" + ) + print( + " ~/.platformio/packages/toolchain-xtensa-esp-elf/bin/xtensa-esp32-elf-readelf" + ) sys.exit(1) + elf_file = sys.argv[1] + objdump_path = sys.argv[2] if len(sys.argv) > 2 else None + readelf_path = sys.argv[3] if len(sys.argv) > 3 else None + try: - report = analyze_elf(sys.argv[1]) + report = analyze_elf(elf_file, objdump_path, readelf_path) print(report) except (subprocess.CalledProcessError, FileNotFoundError, OSError) as e: - print(f"Error: {e}") + print(f"Error: {e}", file=sys.stderr) + if "readelf" in str(e) or "objdump" in str(e): + print( + "\nHint: You need to specify the toolchain-specific tools.", + file=sys.stderr, + ) + print("See usage above for examples.", file=sys.stderr) sys.exit(1) From f011c44130c07bd873b570e65aa73195026b01f0 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 14:26:44 -1000 Subject: [PATCH 033/336] merge --- .github/workflows/ci.yml | 122 ++++++++++++++++------------- esphome/analyze_memory/cli.py | 9 +++ esphome/platformio_api.py | 101 +++++++++++++++++++++++- script/ci_memory_impact_comment.py | 99 +++++++++++++++++++++-- script/ci_memory_impact_extract.py | 40 +++++++--- script/determine-jobs.py | 98 +++++++++++------------ 6 files changed, 345 insertions(+), 124 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 22ae046246..0842248db9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -548,45 +548,53 @@ jobs: with: path: ~/.platformio key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }} - - name: Compile test configuration and extract memory usage + - name: Build and compile with test_build_components id: extract run: | . venv/bin/activate - component="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).component }}" + components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}' platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}" - test_file="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).test_file }}" - echo "Compiling $component for $platform using $test_file" - python script/test_build_components.py -e compile -c "$component" -t "$platform" --no-grouping 2>&1 | \ + echo "Building with test_build_components.py for $platform with components:" + echo "$components" | jq -r '.[]' | sed 's/^/ - /' + + # Use test_build_components.py which handles grouping automatically + # Pass components as comma-separated list + component_list=$(echo "$components" | jq -r 'join(",")') + + echo "Compiling with test_build_components.py..." + python script/test_build_components.py \ + -e compile \ + -c "$component_list" \ + -t "$platform" 2>&1 | \ python script/ci_memory_impact_extract.py --output-env - - name: Find and upload ELF file + - name: Find and upload final ELF file run: | - # Find the ELF file - try both common locations - elf_file="" + # Note: test_build_components.py may run multiple builds, but each overwrites + # the previous firmware.elf. The memory totals (RAM/Flash) are already summed + # by ci_memory_impact_extract.py. This ELF is from the last build and is used + # for detailed component breakdown (if available). 
+ mkdir -p ./elf-artifacts/target - # Try .esphome/build first (default location) + # Find the most recent firmware.elf if [ -d ~/.esphome/build ]; then - elf_file=$(find ~/.esphome/build -name "firmware.elf" -o -name "*.elf" | head -1) - fi + elf_file=$(find ~/.esphome/build -name "firmware.elf" -type f -printf '%T@ %p\n' | sort -rn | head -1 | cut -d' ' -f2-) - # Fallback to finding in .platformio if not found - if [ -z "$elf_file" ] && [ -d ~/.platformio ]; then - elf_file=$(find ~/.platformio -name "firmware.elf" | head -1) - fi - - if [ -n "$elf_file" ] && [ -f "$elf_file" ]; then - echo "Found ELF file: $elf_file" - mkdir -p ./elf-artifacts - cp "$elf_file" ./elf-artifacts/target.elf + if [ -n "$elf_file" ] && [ -f "$elf_file" ]; then + echo "Found final ELF file: $elf_file" + cp "$elf_file" "./elf-artifacts/target/firmware.elf" + else + echo "Warning: No ELF file found in ~/.esphome/build" + ls -la ~/.esphome/build/ || true + fi else - echo "Warning: No ELF file found in ~/.esphome/build or ~/.platformio" - ls -la ~/.esphome/build/ || true + echo "Warning: ~/.esphome/build directory not found" fi - name: Upload ELF artifact uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: memory-impact-target-elf - path: ./elf-artifacts/target.elf + path: ./elf-artifacts/target/firmware.elf if-no-files-found: warn retention-days: 1 @@ -613,45 +621,53 @@ jobs: with: path: ~/.platformio key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }} - - name: Compile test configuration and extract memory usage + - name: Build and compile with test_build_components id: extract run: | . venv/bin/activate - component="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).component }}" + components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}' platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}" - test_file="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).test_file }}" - echo "Compiling $component for $platform using $test_file" - python script/test_build_components.py -e compile -c "$component" -t "$platform" --no-grouping 2>&1 | \ + echo "Building with test_build_components.py for $platform with components:" + echo "$components" | jq -r '.[]' | sed 's/^/ - /' + + # Use test_build_components.py which handles grouping automatically + # Pass components as comma-separated list + component_list=$(echo "$components" | jq -r 'join(",")') + + echo "Compiling with test_build_components.py..." + python script/test_build_components.py \ + -e compile \ + -c "$component_list" \ + -t "$platform" 2>&1 | \ python script/ci_memory_impact_extract.py --output-env - - name: Find and upload ELF file + - name: Find and upload final ELF file run: | - # Find the ELF file - try both common locations - elf_file="" + # Note: test_build_components.py may run multiple builds, but each overwrites + # the previous firmware.elf. The memory totals (RAM/Flash) are already summed + # by ci_memory_impact_extract.py. This ELF is from the last build and is used + # for detailed component breakdown (if available). 
+ mkdir -p ./elf-artifacts/pr - # Try .esphome/build first (default location) + # Find the most recent firmware.elf if [ -d ~/.esphome/build ]; then - elf_file=$(find ~/.esphome/build -name "firmware.elf" -o -name "*.elf" | head -1) - fi + elf_file=$(find ~/.esphome/build -name "firmware.elf" -type f -printf '%T@ %p\n' | sort -rn | head -1 | cut -d' ' -f2-) - # Fallback to finding in .platformio if not found - if [ -z "$elf_file" ] && [ -d ~/.platformio ]; then - elf_file=$(find ~/.platformio -name "firmware.elf" | head -1) - fi - - if [ -n "$elf_file" ] && [ -f "$elf_file" ]; then - echo "Found ELF file: $elf_file" - mkdir -p ./elf-artifacts - cp "$elf_file" ./elf-artifacts/pr.elf + if [ -n "$elf_file" ] && [ -f "$elf_file" ]; then + echo "Found final ELF file: $elf_file" + cp "$elf_file" "./elf-artifacts/pr/firmware.elf" + else + echo "Warning: No ELF file found in ~/.esphome/build" + ls -la ~/.esphome/build/ || true + fi else - echo "Warning: No ELF file found in ~/.esphome/build or ~/.platformio" - ls -la ~/.esphome/build/ || true + echo "Warning: ~/.esphome/build directory not found" fi - name: Upload ELF artifact uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: memory-impact-pr-elf - path: ./elf-artifacts/pr.elf + path: ./elf-artifacts/pr/firmware.elf if-no-files-found: warn retention-days: 1 @@ -690,7 +706,7 @@ jobs: - name: Post or update PR comment env: GH_TOKEN: ${{ github.token }} - COMPONENT: ${{ fromJSON(needs.determine-jobs.outputs.memory_impact).component }} + COMPONENTS: ${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }} PLATFORM: ${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }} TARGET_RAM: ${{ needs.memory-impact-target-branch.outputs.ram_usage }} TARGET_FLASH: ${{ needs.memory-impact-target-branch.outputs.flash_usage }} @@ -699,27 +715,27 @@ jobs: run: | . 
venv/bin/activate - # Check if ELF files exist + # Check if ELF files exist (from final build) target_elf_arg="" pr_elf_arg="" - if [ -f ./elf-artifacts/target/target.elf ]; then + if [ -f ./elf-artifacts/target/firmware.elf ]; then echo "Found target ELF file" - target_elf_arg="--target-elf ./elf-artifacts/target/target.elf" + target_elf_arg="--target-elf ./elf-artifacts/target/firmware.elf" else echo "No target ELF file found" fi - if [ -f ./elf-artifacts/pr/pr.elf ]; then + if [ -f ./elf-artifacts/pr/firmware.elf ]; then echo "Found PR ELF file" - pr_elf_arg="--pr-elf ./elf-artifacts/pr/pr.elf" + pr_elf_arg="--pr-elf ./elf-artifacts/pr/firmware.elf" else echo "No PR ELF file found" fi python script/ci_memory_impact_comment.py \ --pr-number "${{ github.event.pull_request.number }}" \ - --component "$COMPONENT" \ + --components "$COMPONENTS" \ --platform "$PLATFORM" \ --target-ram "$TARGET_RAM" \ --target-flash "$TARGET_FLASH" \ diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index 675e93ae07..184f95ffa6 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -316,6 +316,7 @@ def main(): print( "Usage: python -m esphome.analyze_memory [objdump_path] [readelf_path]" ) + print("\nIf objdump/readelf paths are not provided, you must specify them.") print("\nExample for ESP8266:") print(" python -m esphome.analyze_memory firmware.elf \\") print( @@ -332,6 +333,14 @@ def main(): print( " ~/.platformio/packages/toolchain-xtensa-esp-elf/bin/xtensa-esp32-elf-readelf" ) + print("\nExample for ESP32-C3 (RISC-V):") + print(" python -m esphome.analyze_memory firmware.elf \\") + print( + " ~/.platformio/packages/toolchain-riscv32-esp/bin/riscv32-esp-elf-objdump \\" + ) + print( + " ~/.platformio/packages/toolchain-riscv32-esp/bin/riscv32-esp-elf-readelf" + ) sys.exit(1) elf_file = sys.argv[1] diff --git a/esphome/platformio_api.py b/esphome/platformio_api.py index 9418c1c7d3..a4b5b432fd 100644 --- a/esphome/platformio_api.py +++ b/esphome/platformio_api.py @@ -145,7 +145,16 @@ def run_compile(config, verbose): args = [] if CONF_COMPILE_PROCESS_LIMIT in config[CONF_ESPHOME]: args += [f"-j{config[CONF_ESPHOME][CONF_COMPILE_PROCESS_LIMIT]}"] - return run_platformio_cli_run(config, verbose, *args) + result = run_platformio_cli_run(config, verbose, *args) + + # Run memory analysis if enabled + if config.get(CONF_ESPHOME, {}).get("analyze_memory", False): + try: + analyze_memory_usage(config) + except Exception as e: + _LOGGER.warning("Failed to analyze memory usage: %s", e) + + return result def _run_idedata(config): @@ -374,3 +383,93 @@ class IDEData: return f"{self.cc_path[:-7]}addr2line.exe" return f"{self.cc_path[:-3]}addr2line" + + @property + def objdump_path(self) -> str: + # replace gcc at end with objdump + + # Windows + if self.cc_path.endswith(".exe"): + return f"{self.cc_path[:-7]}objdump.exe" + + return f"{self.cc_path[:-3]}objdump" + + @property + def readelf_path(self) -> str: + # replace gcc at end with readelf + + # Windows + if self.cc_path.endswith(".exe"): + return f"{self.cc_path[:-7]}readelf.exe" + + return f"{self.cc_path[:-3]}readelf" + + +def analyze_memory_usage(config: dict[str, Any]) -> None: + """Analyze memory usage by component after compilation.""" + # Lazy import to avoid overhead when not needed + from esphome.analyze_memory import MemoryAnalyzer + + idedata = get_idedata(config) + + # Get paths to tools + elf_path = idedata.firmware_elf_path + objdump_path = idedata.objdump_path + readelf_path = idedata.readelf_path + + 
# Debug logging + _LOGGER.debug("ELF path from idedata: %s", elf_path) + + # Check if file exists + if not Path(elf_path).exists(): + # Try alternate path + alt_path = Path(CORE.relative_build_path(".pioenvs", CORE.name, "firmware.elf")) + if alt_path.exists(): + elf_path = str(alt_path) + _LOGGER.debug("Using alternate ELF path: %s", elf_path) + else: + _LOGGER.warning("ELF file not found at %s or %s", elf_path, alt_path) + return + + # Extract external components from config + external_components = set() + + # Get the list of built-in ESPHome components + from esphome.analyze_memory import get_esphome_components + + builtin_components = get_esphome_components() + + # Special non-component keys that appear in configs + NON_COMPONENT_KEYS = { + CONF_ESPHOME, + "substitutions", + "packages", + "globals", + "<<", + } + + # Check all top-level keys in config + for key in config: + if key not in builtin_components and key not in NON_COMPONENT_KEYS: + # This is an external component + external_components.add(key) + + _LOGGER.debug("Detected external components: %s", external_components) + + # Create analyzer and run analysis + analyzer = MemoryAnalyzer(elf_path, objdump_path, readelf_path, external_components) + analyzer.analyze() + + # Generate and print report + report = analyzer.generate_report() + _LOGGER.info("\n%s", report) + + # Optionally save to file + if config.get(CONF_ESPHOME, {}).get("memory_report_file"): + report_file = Path(config[CONF_ESPHOME]["memory_report_file"]) + if report_file.suffix == ".json": + report_file.write_text(analyzer.to_json()) + _LOGGER.info("Memory report saved to %s", report_file) + else: + report_file.write_text(report) + _LOGGER.info("Memory report saved to %s", report_file) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 0f65e4fbbd..d31868ed1c 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -24,6 +24,57 @@ from esphome.analyze_memory import MemoryAnalyzer # noqa: E402 COMMENT_MARKER = "" +def get_platform_toolchain(platform: str) -> tuple[str | None, str | None]: + """Get platform-specific objdump and readelf paths. + + Args: + platform: Platform name (e.g., "esp8266-ard", "esp32-idf", "esp32-c3-idf") + + Returns: + Tuple of (objdump_path, readelf_path) or (None, None) if not found/supported + """ + from pathlib import Path + + home = Path.home() + platformio_packages = home / ".platformio" / "packages" + + # Map platform to toolchain + toolchain = None + prefix = None + + if "esp8266" in platform: + toolchain = "toolchain-xtensa" + prefix = "xtensa-lx106-elf" + elif "esp32-c" in platform or "esp32-h" in platform or "esp32-p4" in platform: + # RISC-V variants (C2, C3, C5, C6, H2, P4) + toolchain = "toolchain-riscv32-esp" + prefix = "riscv32-esp-elf" + elif "esp32" in platform: + # Xtensa variants (original, S2, S3) + toolchain = "toolchain-xtensa-esp-elf" + if "s2" in platform: + prefix = "xtensa-esp32s2-elf" + elif "s3" in platform: + prefix = "xtensa-esp32s3-elf" + else: + prefix = "xtensa-esp32-elf" + else: + # Other platforms (RP2040, LibreTiny, etc.) 
- not supported + print(f"Platform {platform} not supported for ELF analysis", file=sys.stderr) + return None, None + + toolchain_path = platformio_packages / toolchain / "bin" + objdump_path = toolchain_path / f"{prefix}-objdump" + readelf_path = toolchain_path / f"{prefix}-readelf" + + if objdump_path.exists() and readelf_path.exists(): + print(f"Using {platform} toolchain: {prefix}", file=sys.stderr) + return str(objdump_path), str(readelf_path) + + print(f"Warning: Toolchain not found at {toolchain_path}", file=sys.stderr) + return None, None + + def format_bytes(bytes_value: int) -> str: """Format bytes value with comma separators. @@ -314,7 +365,7 @@ def create_detailed_breakdown_table( def create_comment_body( - component: str, + components: list[str], platform: str, target_ram: int, target_flash: int, @@ -328,7 +379,7 @@ def create_comment_body( """Create the comment body with memory impact analysis. Args: - component: Component name + components: List of component names (merged config) platform: Platform name target_ram: RAM usage in target branch target_flash: Flash usage in target branch @@ -374,10 +425,18 @@ def create_comment_body( else: print("No ELF files provided, skipping detailed analysis", file=sys.stderr) + # Format components list + if len(components) == 1: + components_str = f"`{components[0]}`" + config_note = "a representative test configuration" + else: + components_str = ", ".join(f"`{c}`" for c in sorted(components)) + config_note = f"a merged configuration with {len(components)} components" + return f"""{COMMENT_MARKER} ## Memory Impact Analysis -**Component:** `{component}` +**Components:** {components_str} **Platform:** `{platform}` | Metric | Target Branch | This PR | Change | @@ -386,7 +445,7 @@ def create_comment_body( | **Flash** | {format_bytes(target_flash)} | {format_bytes(pr_flash)} | {flash_change} | {component_breakdown}{symbol_changes} --- -*This analysis runs automatically when a single component changes. Memory usage is measured from a representative test configuration.* +*This analysis runs automatically when components change. Memory usage is measured from {config_note}.* """ @@ -537,7 +596,11 @@ def main() -> int: description="Post or update PR comment with memory impact analysis" ) parser.add_argument("--pr-number", required=True, help="PR number") - parser.add_argument("--component", required=True, help="Component name") + parser.add_argument( + "--components", + required=True, + help='JSON array of component names (e.g., \'["api", "wifi"]\')', + ) parser.add_argument("--platform", required=True, help="Platform name") parser.add_argument( "--target-ram", type=int, required=True, help="Target branch RAM usage" @@ -560,9 +623,29 @@ def main() -> int: args = parser.parse_args() + # Parse components from JSON + try: + components = json.loads(args.components) + if not isinstance(components, list): + print("Error: --components must be a JSON array", file=sys.stderr) + sys.exit(1) + except json.JSONDecodeError as e: + print(f"Error parsing --components JSON: {e}", file=sys.stderr) + sys.exit(1) + + # Detect platform-specific toolchain paths + objdump_path = args.objdump_path + readelf_path = args.readelf_path + + if not objdump_path or not readelf_path: + # Auto-detect based on platform + objdump_path, readelf_path = get_platform_toolchain(args.platform) + # Create comment body + # Note: ELF files (if provided) are from the final build when test_build_components + # runs multiple builds. 
Memory totals (RAM/Flash) are already summed across all builds. comment_body = create_comment_body( - component=args.component, + components=components, platform=args.platform, target_ram=args.target_ram, target_flash=args.target_flash, @@ -570,8 +653,8 @@ def main() -> int: pr_flash=args.pr_flash, target_elf=args.target_elf, pr_elf=args.pr_elf, - objdump_path=args.objdump_path, - readelf_path=args.readelf_path, + objdump_path=objdump_path, + readelf_path=readelf_path, ) # Post or update comment diff --git a/script/ci_memory_impact_extract.py b/script/ci_memory_impact_extract.py index 9ddd39096f..1b8a994f14 100755 --- a/script/ci_memory_impact_extract.py +++ b/script/ci_memory_impact_extract.py @@ -28,27 +28,36 @@ from script.ci_helpers import write_github_output def extract_from_compile_output(output_text: str) -> tuple[int | None, int | None]: """Extract memory usage from PlatformIO compile output. + Supports multiple builds (for component groups or isolated components). + When test_build_components.py creates multiple builds, this sums the + memory usage across all builds. + Looks for lines like: RAM: [==== ] 36.1% (used 29548 bytes from 81920 bytes) Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes) Args: - output_text: Compile output text + output_text: Compile output text (may contain multiple builds) Returns: - Tuple of (ram_bytes, flash_bytes) or (None, None) if not found + Tuple of (total_ram_bytes, total_flash_bytes) or (None, None) if not found """ - ram_match = re.search( + # Find all RAM and Flash matches (may be multiple builds) + ram_matches = re.findall( r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", output_text ) - flash_match = re.search( + flash_matches = re.findall( r"Flash:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", output_text ) - if ram_match and flash_match: - return int(ram_match.group(1)), int(flash_match.group(1)) + if not ram_matches or not flash_matches: + return None, None - return None, None + # Sum all builds (handles multiple component groups) + total_ram = sum(int(match) for match in ram_matches) + total_flash = sum(int(match) for match in flash_matches) + + return total_ram, total_flash def main() -> int: @@ -83,8 +92,21 @@ def main() -> int: ) return 1 - print(f"RAM: {ram_bytes} bytes", file=sys.stderr) - print(f"Flash: {flash_bytes} bytes", file=sys.stderr) + # Count how many builds were found + num_builds = len( + re.findall( + r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", compile_output + ) + ) + + if num_builds > 1: + print( + f"Found {num_builds} builds - summing memory usage across all builds", + file=sys.stderr, + ) + + print(f"Total RAM: {ram_bytes} bytes", file=sys.stderr) + print(f"Total Flash: {flash_bytes} bytes", file=sys.stderr) if args.output_env: # Output to GitHub Actions diff --git a/script/determine-jobs.py b/script/determine-jobs.py index fa44941c29..56de0e77ba 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -237,14 +237,14 @@ def _component_has_tests(component: str) -> bool: return any(tests_dir.glob("test.*.yaml")) -def detect_single_component_for_memory_impact( +def detect_memory_impact_config( branch: str | None = None, ) -> dict[str, Any]: - """Detect if exactly one component changed for memory impact analysis. + """Determine memory impact analysis configuration. - This analyzes the actual changed files (not dependencies) to determine if - exactly one component has been modified. This is different from the - changed_components list which includes all dependencies. 
+ Always runs memory impact analysis when there are changed components, + building a merged configuration with all changed components (like + test_build_components.py does) to get comprehensive memory analysis. Args: branch: Branch to compare against @@ -252,37 +252,25 @@ def detect_single_component_for_memory_impact( Returns: Dictionary with memory impact analysis parameters: - should_run: "true" or "false" - - component: component name (if should_run is true) - - test_file: test file name (if should_run is true) - - platform: platform name (if should_run is true) + - components: list of component names to analyze + - platform: platform name for the merged build + - use_merged_config: "true" (always use merged config) """ # Platform preference order for memory impact analysis - # Ordered by production relevance and memory constraint importance + # Prefer ESP8266 for memory impact as it's the most constrained platform PLATFORM_PREFERENCE = [ + "esp8266-ard", # ESP8266 Arduino (most memory constrained - best for impact analysis) "esp32-idf", # Primary ESP32 IDF platform "esp32-c3-idf", # ESP32-C3 IDF "esp32-c6-idf", # ESP32-C6 IDF "esp32-s2-idf", # ESP32-S2 IDF "esp32-s3-idf", # ESP32-S3 IDF - "esp32-c2-idf", # ESP32-C2 IDF - "esp32-c5-idf", # ESP32-C5 IDF - "esp32-h2-idf", # ESP32-H2 IDF - "esp32-p4-idf", # ESP32-P4 IDF - "esp8266-ard", # ESP8266 Arduino (memory constrained) - "esp32-ard", # ESP32 Arduino - "esp32-c3-ard", # ESP32-C3 Arduino - "esp32-s2-ard", # ESP32-S2 Arduino - "esp32-s3-ard", # ESP32-S3 Arduino - "bk72xx-ard", # BK72xx Arduino - "rp2040-ard", # RP2040 Arduino - "nrf52-adafruit", # nRF52 Adafruit - "host", # Host platform (development/testing) ] # Get actually changed files (not dependencies) files = changed_files(branch) - # Find all changed components (excluding core) + # Find all changed components (excluding core and base bus components) changed_component_set = set() for file in files: @@ -291,49 +279,53 @@ def detect_single_component_for_memory_impact( if len(parts) >= 3: component = parts[2] # Skip base bus components as they're used across many builds - if component not in ["i2c", "spi", "uart", "modbus"]: + if component not in ["i2c", "spi", "uart", "modbus", "canbus"]: changed_component_set.add(component) - # Only proceed if exactly one component changed - if len(changed_component_set) != 1: + # If no components changed, don't run memory impact + if not changed_component_set: return {"should_run": "false"} - component = list(changed_component_set)[0] + # Find components that have tests on the preferred platform + components_with_tests = [] + selected_platform = None - # Find a test configuration for this component - tests_dir = Path(root_path) / "tests" / "components" / component + for component in sorted(changed_component_set): + tests_dir = Path(root_path) / "tests" / "components" / component + if not tests_dir.exists(): + continue - if not tests_dir.exists(): - return {"should_run": "false"} + # Look for test files on preferred platforms + test_files = list(tests_dir.glob("test.*.yaml")) + if not test_files: + continue - # Look for test files - test_files = list(tests_dir.glob("test.*.yaml")) - if not test_files: - return {"should_run": "false"} - - # Try each preferred platform in order - for preferred_platform in PLATFORM_PREFERENCE: + # Check if component has tests for any preferred platform for test_file in test_files: parts = test_file.stem.split(".") if len(parts) >= 2: platform = parts[1] - if platform == preferred_platform: - return { - "should_run": 
"true", - "component": component, - "test_file": test_file.name, - "platform": platform, - } + if platform in PLATFORM_PREFERENCE: + components_with_tests.append(component) + # Select the most preferred platform across all components + if selected_platform is None or PLATFORM_PREFERENCE.index( + platform + ) < PLATFORM_PREFERENCE.index(selected_platform): + selected_platform = platform + break + + # If no components have tests, don't run memory impact + if not components_with_tests: + return {"should_run": "false"} + + # Use the most preferred platform found, or fall back to esp8266-ard + platform = selected_platform or "esp8266-ard" - # Fall back to first test file - test_file = test_files[0] - parts = test_file.stem.split(".") - platform = parts[1] if len(parts) >= 2 else "esp32-idf" return { "should_run": "true", - "component": component, - "test_file": test_file.name, + "components": components_with_tests, "platform": platform, + "use_merged_config": "true", } @@ -386,8 +378,8 @@ def main() -> None: if component not in directly_changed_components ] - # Detect single component change for memory impact analysis - memory_impact = detect_single_component_for_memory_impact(args.branch) + # Detect components for memory impact analysis (merged config) + memory_impact = detect_memory_impact_config(args.branch) # Build output output: dict[str, Any] = { From f87c969b4315a67e54237f40e50455b7f0ea2fd8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 14:40:45 -1000 Subject: [PATCH 034/336] tweak --- .github/workflows/ci.yml | 134 ++++++++++---------- esphome/analyze_memory/__init__.py | 73 +++++++++++ esphome/analyze_memory/cli.py | 119 ++++++++++++------ esphome/platformio_api.py | 9 +- script/ci_memory_impact_comment.py | 188 ++++++++--------------------- script/ci_memory_impact_extract.py | 112 +++++++++++++++++ 6 files changed, 381 insertions(+), 254 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0842248db9..7a4d8bf929 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -548,7 +548,7 @@ jobs: with: path: ~/.platformio key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }} - - name: Build and compile with test_build_components + - name: Build, compile, and analyze memory id: extract run: | . venv/bin/activate @@ -563,38 +563,32 @@ jobs: component_list=$(echo "$components" | jq -r 'join(",")') echo "Compiling with test_build_components.py..." - python script/test_build_components.py \ - -e compile \ - -c "$component_list" \ - -t "$platform" 2>&1 | \ - python script/ci_memory_impact_extract.py --output-env - - name: Find and upload final ELF file - run: | - # Note: test_build_components.py may run multiple builds, but each overwrites - # the previous firmware.elf. The memory totals (RAM/Flash) are already summed - # by ci_memory_impact_extract.py. This ELF is from the last build and is used - # for detailed component breakdown (if available). 
- mkdir -p ./elf-artifacts/target - # Find the most recent firmware.elf - if [ -d ~/.esphome/build ]; then - elf_file=$(find ~/.esphome/build -name "firmware.elf" -type f -printf '%T@ %p\n' | sort -rn | head -1 | cut -d' ' -f2-) + # Find most recent build directory for detailed analysis + build_dir=$(find ~/.esphome/build -type d -maxdepth 1 -mindepth 1 -printf '%T@ %p\n' 2>/dev/null | sort -rn | head -1 | cut -d' ' -f2- || echo "") - if [ -n "$elf_file" ] && [ -f "$elf_file" ]; then - echo "Found final ELF file: $elf_file" - cp "$elf_file" "./elf-artifacts/target/firmware.elf" - else - echo "Warning: No ELF file found in ~/.esphome/build" - ls -la ~/.esphome/build/ || true - fi + # Run build and extract memory, with optional detailed analysis + if [ -n "$build_dir" ]; then + python script/test_build_components.py \ + -e compile \ + -c "$component_list" \ + -t "$platform" 2>&1 | \ + python script/ci_memory_impact_extract.py \ + --output-env \ + --build-dir "$build_dir" \ + --output-json memory-analysis-target.json else - echo "Warning: ~/.esphome/build directory not found" + python script/test_build_components.py \ + -e compile \ + -c "$component_list" \ + -t "$platform" 2>&1 | \ + python script/ci_memory_impact_extract.py --output-env fi - - name: Upload ELF artifact + - name: Upload memory analysis JSON uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: - name: memory-impact-target-elf - path: ./elf-artifacts/target/firmware.elf + name: memory-analysis-target + path: memory-analysis-target.json if-no-files-found: warn retention-days: 1 @@ -621,7 +615,7 @@ jobs: with: path: ~/.platformio key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }} - - name: Build and compile with test_build_components + - name: Build, compile, and analyze memory id: extract run: | . venv/bin/activate @@ -636,38 +630,32 @@ jobs: component_list=$(echo "$components" | jq -r 'join(",")') echo "Compiling with test_build_components.py..." - python script/test_build_components.py \ - -e compile \ - -c "$component_list" \ - -t "$platform" 2>&1 | \ - python script/ci_memory_impact_extract.py --output-env - - name: Find and upload final ELF file - run: | - # Note: test_build_components.py may run multiple builds, but each overwrites - # the previous firmware.elf. The memory totals (RAM/Flash) are already summed - # by ci_memory_impact_extract.py. This ELF is from the last build and is used - # for detailed component breakdown (if available). 
- mkdir -p ./elf-artifacts/pr - # Find the most recent firmware.elf - if [ -d ~/.esphome/build ]; then - elf_file=$(find ~/.esphome/build -name "firmware.elf" -type f -printf '%T@ %p\n' | sort -rn | head -1 | cut -d' ' -f2-) + # Find most recent build directory for detailed analysis + build_dir=$(find ~/.esphome/build -type d -maxdepth 1 -mindepth 1 -printf '%T@ %p\n' 2>/dev/null | sort -rn | head -1 | cut -d' ' -f2- || echo "") - if [ -n "$elf_file" ] && [ -f "$elf_file" ]; then - echo "Found final ELF file: $elf_file" - cp "$elf_file" "./elf-artifacts/pr/firmware.elf" - else - echo "Warning: No ELF file found in ~/.esphome/build" - ls -la ~/.esphome/build/ || true - fi + # Run build and extract memory, with optional detailed analysis + if [ -n "$build_dir" ]; then + python script/test_build_components.py \ + -e compile \ + -c "$component_list" \ + -t "$platform" 2>&1 | \ + python script/ci_memory_impact_extract.py \ + --output-env \ + --build-dir "$build_dir" \ + --output-json memory-analysis-pr.json else - echo "Warning: ~/.esphome/build directory not found" + python script/test_build_components.py \ + -e compile \ + -c "$component_list" \ + -t "$platform" 2>&1 | \ + python script/ci_memory_impact_extract.py --output-env fi - - name: Upload ELF artifact + - name: Upload memory analysis JSON uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: - name: memory-impact-pr-elf - path: ./elf-artifacts/pr/firmware.elf + name: memory-analysis-pr + path: memory-analysis-pr.json if-no-files-found: warn retention-days: 1 @@ -691,17 +679,17 @@ jobs: with: python-version: ${{ env.DEFAULT_PYTHON }} cache-key: ${{ needs.common.outputs.cache-key }} - - name: Download target ELF artifact + - name: Download target analysis JSON uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: - name: memory-impact-target-elf - path: ./elf-artifacts/target + name: memory-analysis-target + path: ./memory-analysis continue-on-error: true - - name: Download PR ELF artifact + - name: Download PR analysis JSON uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: - name: memory-impact-pr-elf - path: ./elf-artifacts/pr + name: memory-analysis-pr + path: ./memory-analysis continue-on-error: true - name: Post or update PR comment env: @@ -715,22 +703,22 @@ jobs: run: | . 
venv/bin/activate - # Check if ELF files exist (from final build) - target_elf_arg="" - pr_elf_arg="" + # Check if analysis JSON files exist + target_json_arg="" + pr_json_arg="" - if [ -f ./elf-artifacts/target/firmware.elf ]; then - echo "Found target ELF file" - target_elf_arg="--target-elf ./elf-artifacts/target/firmware.elf" + if [ -f ./memory-analysis/memory-analysis-target.json ]; then + echo "Found target analysis JSON" + target_json_arg="--target-json ./memory-analysis/memory-analysis-target.json" else - echo "No target ELF file found" + echo "No target analysis JSON found" fi - if [ -f ./elf-artifacts/pr/firmware.elf ]; then - echo "Found PR ELF file" - pr_elf_arg="--pr-elf ./elf-artifacts/pr/firmware.elf" + if [ -f ./memory-analysis/memory-analysis-pr.json ]; then + echo "Found PR analysis JSON" + pr_json_arg="--pr-json ./memory-analysis/memory-analysis-pr.json" else - echo "No PR ELF file found" + echo "No PR analysis JSON found" fi python script/ci_memory_impact_comment.py \ @@ -741,8 +729,8 @@ jobs: --target-flash "$TARGET_FLASH" \ --pr-ram "$PR_RAM" \ --pr-flash "$PR_FLASH" \ - $target_elf_arg \ - $pr_elf_arg + $target_json_arg \ + $pr_json_arg ci-status: name: CI Status diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index b76cb4ec3f..5bd46fd01e 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -7,6 +7,7 @@ import logging from pathlib import Path import re import subprocess +from typing import TYPE_CHECKING from .const import ( CORE_SUBCATEGORY_PATTERNS, @@ -22,9 +23,65 @@ from .helpers import ( parse_symbol_line, ) +if TYPE_CHECKING: + from esphome.platformio_api import IDEData + _LOGGER = logging.getLogger(__name__) +def get_toolchain_for_platform(platform: str) -> tuple[str | None, str | None]: + """Get objdump and readelf paths for a given platform. + + This function auto-detects the correct toolchain based on the platform name, + using the same detection logic as PlatformIO's IDEData class. + + Args: + platform: Platform name (e.g., "esp8266-ard", "esp32-idf", "esp32-c3-idf") + + Returns: + Tuple of (objdump_path, readelf_path) or (None, None) if not found/supported + """ + home = Path.home() + platformio_packages = home / ".platformio" / "packages" + + # Map platform to toolchain and prefix (same logic as PlatformIO uses) + toolchain = None + prefix = None + + if "esp8266" in platform: + toolchain = "toolchain-xtensa" + prefix = "xtensa-lx106-elf" + elif "esp32-c" in platform or "esp32-h" in platform or "esp32-p4" in platform: + # RISC-V variants (C2, C3, C5, C6, H2, P4) + toolchain = "toolchain-riscv32-esp" + prefix = "riscv32-esp-elf" + elif "esp32" in platform: + # Xtensa variants (original, S2, S3) + toolchain = "toolchain-xtensa-esp-elf" + if "s2" in platform: + prefix = "xtensa-esp32s2-elf" + elif "s3" in platform: + prefix = "xtensa-esp32s3-elf" + else: + prefix = "xtensa-esp32-elf" + else: + # Other platforms (RP2040, LibreTiny, etc.) 
- not supported for ELF analysis + _LOGGER.debug("Platform %s not supported for ELF analysis", platform) + return None, None + + # Construct paths (same pattern as IDEData.objdump_path/readelf_path) + toolchain_path = platformio_packages / toolchain / "bin" + objdump_path = toolchain_path / f"{prefix}-objdump" + readelf_path = toolchain_path / f"{prefix}-readelf" + + if objdump_path.exists() and readelf_path.exists(): + _LOGGER.debug("Found %s toolchain: %s", platform, prefix) + return str(objdump_path), str(readelf_path) + + _LOGGER.warning("Toolchain not found at %s", toolchain_path) + return None, None + + @dataclass class MemorySection: """Represents a memory section with its symbols.""" @@ -67,11 +124,27 @@ class MemoryAnalyzer: objdump_path: str | None = None, readelf_path: str | None = None, external_components: set[str] | None = None, + idedata: "IDEData | None" = None, ): + """Initialize memory analyzer. + + Args: + elf_path: Path to ELF file to analyze + objdump_path: Path to objdump binary (auto-detected from idedata if not provided) + readelf_path: Path to readelf binary (auto-detected from idedata if not provided) + external_components: Set of external component names + idedata: Optional PlatformIO IDEData object to auto-detect toolchain paths + """ self.elf_path = Path(elf_path) if not self.elf_path.exists(): raise FileNotFoundError(f"ELF file not found: {elf_path}") + # Auto-detect toolchain paths from idedata if not provided + if idedata is not None and (objdump_path is None or readelf_path is None): + objdump_path = objdump_path or idedata.objdump_path + readelf_path = readelf_path or idedata.readelf_path + _LOGGER.debug("Using toolchain paths from PlatformIO idedata") + self.objdump_path = objdump_path or "objdump" self.readelf_path = readelf_path or "readelf" self.external_components = external_components or set() diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index 184f95ffa6..e8541b1621 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -1,7 +1,6 @@ """CLI interface for memory analysis with report generation.""" from collections import defaultdict -import subprocess import sys from . 
import MemoryAnalyzer @@ -313,51 +312,91 @@ def analyze_elf( def main(): """CLI entrypoint for memory analysis.""" if len(sys.argv) < 2: - print( - "Usage: python -m esphome.analyze_memory [objdump_path] [readelf_path]" - ) - print("\nIf objdump/readelf paths are not provided, you must specify them.") - print("\nExample for ESP8266:") - print(" python -m esphome.analyze_memory firmware.elf \\") - print( - " ~/.platformio/packages/toolchain-xtensa/bin/xtensa-lx106-elf-objdump \\" - ) - print( - " ~/.platformio/packages/toolchain-xtensa/bin/xtensa-lx106-elf-readelf" - ) - print("\nExample for ESP32:") - print(" python -m esphome.analyze_memory firmware.elf \\") - print( - " ~/.platformio/packages/toolchain-xtensa-esp-elf/bin/xtensa-esp32-elf-objdump \\" - ) - print( - " ~/.platformio/packages/toolchain-xtensa-esp-elf/bin/xtensa-esp32-elf-readelf" - ) - print("\nExample for ESP32-C3 (RISC-V):") - print(" python -m esphome.analyze_memory firmware.elf \\") - print( - " ~/.platformio/packages/toolchain-riscv32-esp/bin/riscv32-esp-elf-objdump \\" - ) - print( - " ~/.platformio/packages/toolchain-riscv32-esp/bin/riscv32-esp-elf-readelf" - ) + print("Usage: python -m esphome.analyze_memory ") + print("\nAnalyze memory usage from an ESPHome build directory.") + print("The build directory should contain firmware.elf and idedata will be") + print("loaded from ~/.esphome/.internal/idedata/.json") + print("\nExamples:") + print(" python -m esphome.analyze_memory ~/.esphome/build/my-device") + print(" python -m esphome.analyze_memory .esphome/build/my-device") + print(" python -m esphome.analyze_memory my-device # Short form") sys.exit(1) - elf_file = sys.argv[1] - objdump_path = sys.argv[2] if len(sys.argv) > 2 else None - readelf_path = sys.argv[3] if len(sys.argv) > 3 else None + build_dir = sys.argv[1] + + # Load build directory + import json + from pathlib import Path + + from esphome.platformio_api import IDEData + + build_path = Path(build_dir) + + # If no path separator in name, assume it's a device name + if "/" not in build_dir and not build_path.is_dir(): + # Try current directory first + cwd_path = Path.cwd() / ".esphome" / "build" / build_dir + if cwd_path.is_dir(): + build_path = cwd_path + print(f"Using build directory: {build_path}", file=sys.stderr) + else: + # Fall back to home directory + build_path = Path.home() / ".esphome" / "build" / build_dir + print(f"Using build directory: {build_path}", file=sys.stderr) + + if not build_path.is_dir(): + print(f"Error: {build_path} is not a directory", file=sys.stderr) + sys.exit(1) + + # Find firmware.elf + elf_file = None + for elf_candidate in [ + build_path / "firmware.elf", + build_path / ".pioenvs" / build_path.name / "firmware.elf", + ]: + if elf_candidate.exists(): + elf_file = str(elf_candidate) + break + + if not elf_file: + print(f"Error: firmware.elf not found in {build_dir}", file=sys.stderr) + sys.exit(1) + + # Find idedata.json - check current directory first, then home + device_name = build_path.name + idedata_candidates = [ + Path.cwd() / ".esphome" / "idedata" / f"{device_name}.json", + Path.home() / ".esphome" / "idedata" / f"{device_name}.json", + ] + + idedata = None + for idedata_path in idedata_candidates: + if idedata_path.exists(): + try: + with open(idedata_path, encoding="utf-8") as f: + raw_data = json.load(f) + idedata = IDEData(raw_data) + print(f"Loaded idedata from: {idedata_path}", file=sys.stderr) + break + except (json.JSONDecodeError, OSError) as e: + print(f"Warning: Failed to load idedata: {e}", 
file=sys.stderr) + + if not idedata: + print( + f"Warning: idedata not found (searched {idedata_candidates[0]} and {idedata_candidates[1]})", + file=sys.stderr, + ) try: - report = analyze_elf(elf_file, objdump_path, readelf_path) + analyzer = MemoryAnalyzerCLI(elf_file, idedata=idedata) + analyzer.analyze() + report = analyzer.generate_report() print(report) - except (subprocess.CalledProcessError, FileNotFoundError, OSError) as e: + except Exception as e: print(f"Error: {e}", file=sys.stderr) - if "readelf" in str(e) or "objdump" in str(e): - print( - "\nHint: You need to specify the toolchain-specific tools.", - file=sys.stderr, - ) - print("See usage above for examples.", file=sys.stderr) + import traceback + + traceback.print_exc(file=sys.stderr) sys.exit(1) diff --git a/esphome/platformio_api.py b/esphome/platformio_api.py index a4b5b432fd..065a8cf896 100644 --- a/esphome/platformio_api.py +++ b/esphome/platformio_api.py @@ -412,10 +412,8 @@ def analyze_memory_usage(config: dict[str, Any]) -> None: idedata = get_idedata(config) - # Get paths to tools + # Get ELF path elf_path = idedata.firmware_elf_path - objdump_path = idedata.objdump_path - readelf_path = idedata.readelf_path # Debug logging _LOGGER.debug("ELF path from idedata: %s", elf_path) @@ -457,7 +455,10 @@ def analyze_memory_usage(config: dict[str, Any]) -> None: _LOGGER.debug("Detected external components: %s", external_components) # Create analyzer and run analysis - analyzer = MemoryAnalyzer(elf_path, objdump_path, readelf_path, external_components) + # Pass idedata to auto-detect toolchain paths + analyzer = MemoryAnalyzer( + elf_path, external_components=external_components, idedata=idedata + ) analyzer.analyze() # Generate and print report diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index d31868ed1c..c5eb9e701f 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -18,61 +18,31 @@ import sys sys.path.insert(0, str(Path(__file__).parent.parent)) # pylint: disable=wrong-import-position -from esphome.analyze_memory import MemoryAnalyzer # noqa: E402 # Comment marker to identify our memory impact comments COMMENT_MARKER = "" -def get_platform_toolchain(platform: str) -> tuple[str | None, str | None]: - """Get platform-specific objdump and readelf paths. +def load_analysis_json(json_path: str) -> dict | None: + """Load memory analysis results from JSON file. 
Args: - platform: Platform name (e.g., "esp8266-ard", "esp32-idf", "esp32-c3-idf") + json_path: Path to analysis JSON file Returns: - Tuple of (objdump_path, readelf_path) or (None, None) if not found/supported + Dictionary with analysis results or None if file doesn't exist/can't be loaded """ - from pathlib import Path + json_file = Path(json_path) + if not json_file.exists(): + print(f"Analysis JSON not found: {json_path}", file=sys.stderr) + return None - home = Path.home() - platformio_packages = home / ".platformio" / "packages" - - # Map platform to toolchain - toolchain = None - prefix = None - - if "esp8266" in platform: - toolchain = "toolchain-xtensa" - prefix = "xtensa-lx106-elf" - elif "esp32-c" in platform or "esp32-h" in platform or "esp32-p4" in platform: - # RISC-V variants (C2, C3, C5, C6, H2, P4) - toolchain = "toolchain-riscv32-esp" - prefix = "riscv32-esp-elf" - elif "esp32" in platform: - # Xtensa variants (original, S2, S3) - toolchain = "toolchain-xtensa-esp-elf" - if "s2" in platform: - prefix = "xtensa-esp32s2-elf" - elif "s3" in platform: - prefix = "xtensa-esp32s3-elf" - else: - prefix = "xtensa-esp32-elf" - else: - # Other platforms (RP2040, LibreTiny, etc.) - not supported - print(f"Platform {platform} not supported for ELF analysis", file=sys.stderr) - return None, None - - toolchain_path = platformio_packages / toolchain / "bin" - objdump_path = toolchain_path / f"{prefix}-objdump" - readelf_path = toolchain_path / f"{prefix}-readelf" - - if objdump_path.exists() and readelf_path.exists(): - print(f"Using {platform} toolchain: {prefix}", file=sys.stderr) - return str(objdump_path), str(readelf_path) - - print(f"Warning: Toolchain not found at {toolchain_path}", file=sys.stderr) - return None, None + try: + with open(json_file, encoding="utf-8") as f: + return json.load(f) + except (json.JSONDecodeError, OSError) as e: + print(f"Failed to load analysis JSON: {e}", file=sys.stderr) + return None def format_bytes(bytes_value: int) -> str: @@ -122,56 +92,6 @@ def format_change(before: int, after: int) -> str: return f"{emoji} {delta_str} ({pct_str})" -def run_detailed_analysis( - elf_path: str, objdump_path: str | None = None, readelf_path: str | None = None -) -> tuple[dict | None, dict | None]: - """Run detailed memory analysis on an ELF file. 
- - Args: - elf_path: Path to ELF file - objdump_path: Optional path to objdump tool - readelf_path: Optional path to readelf tool - - Returns: - Tuple of (component_breakdown, symbol_map) or (None, None) if analysis fails - component_breakdown: Dictionary with component memory breakdown - symbol_map: Dictionary mapping symbol names to their sizes - """ - try: - analyzer = MemoryAnalyzer(elf_path, objdump_path, readelf_path) - components = analyzer.analyze() - - # Convert ComponentMemory objects to dictionaries - component_result = {} - for name, mem in components.items(): - component_result[name] = { - "text": mem.text_size, - "rodata": mem.rodata_size, - "data": mem.data_size, - "bss": mem.bss_size, - "flash_total": mem.flash_total, - "ram_total": mem.ram_total, - "symbol_count": mem.symbol_count, - } - - # Build symbol map from all sections - symbol_map = {} - for section in analyzer.sections.values(): - for symbol_name, size, _ in section.symbols: - if size > 0: # Only track non-zero sized symbols - # Demangle the symbol for better readability - demangled = analyzer._demangle_symbol(symbol_name) - symbol_map[demangled] = size - - return component_result, symbol_map - except Exception as e: - print(f"Warning: Failed to run detailed analysis: {e}", file=sys.stderr) - import traceback - - traceback.print_exc(file=sys.stderr) - return None, None - - def create_symbol_changes_table( target_symbols: dict | None, pr_symbols: dict | None ) -> str: @@ -371,10 +291,10 @@ def create_comment_body( target_flash: int, pr_ram: int, pr_flash: int, - target_elf: str | None = None, - pr_elf: str | None = None, - objdump_path: str | None = None, - readelf_path: str | None = None, + target_analysis: dict | None = None, + pr_analysis: dict | None = None, + target_symbols: dict | None = None, + pr_symbols: dict | None = None, ) -> str: """Create the comment body with memory impact analysis. 
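
(Aside for readers of this series: after this refactor the comment script no longer shells out to objdump/readelf; it only consumes the JSON written by script/ci_memory_impact_extract.py. A rough sketch of the shape load_analysis_json() is expected to return follows; the "wifi" component, the symbol name, and all byte counts are invented illustrative values, not output from a real build.)

# Illustrative shape of the analysis JSON consumed by load_analysis_json().
# Keys mirror what ci_memory_impact_extract.py writes via --output-json;
# the "wifi" entry and every number here are made-up examples.
example_analysis = {
    "ram_bytes": 29548,
    "flash_bytes": 348511,
    "detailed_analysis": {
        "components": {
            "wifi": {
                "text": 51200,
                "rodata": 8192,
                "data": 1024,
                "bss": 2048,
                "flash_total": 60416,
                "ram_total": 3072,
                "symbol_count": 412,
            },
        },
        # Demangled symbol name -> size in bytes
        "symbols": {
            "esphome::wifi::WiFiComponent::loop()": 1536,
        },
    },
}
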
@@ -385,10 +305,10 @@ def create_comment_body( target_flash: Flash usage in target branch pr_ram: RAM usage in PR branch pr_flash: Flash usage in PR branch - target_elf: Optional path to target branch ELF file - pr_elf: Optional path to PR branch ELF file - objdump_path: Optional path to objdump tool - readelf_path: Optional path to readelf tool + target_analysis: Optional component breakdown for target branch + pr_analysis: Optional component breakdown for PR branch + target_symbols: Optional symbol map for target branch + pr_symbols: Optional symbol map for PR branch Returns: Formatted comment body @@ -396,29 +316,14 @@ def create_comment_body( ram_change = format_change(target_ram, pr_ram) flash_change = format_change(target_flash, pr_flash) - # Run detailed analysis if ELF files are provided - target_analysis = None - pr_analysis = None - target_symbols = None - pr_symbols = None + # Use provided analysis data if available component_breakdown = "" symbol_changes = "" - if target_elf and pr_elf: - print( - f"Running detailed analysis on {target_elf} and {pr_elf}", file=sys.stderr + if target_analysis and pr_analysis: + component_breakdown = create_detailed_breakdown_table( + target_analysis, pr_analysis ) - target_analysis, target_symbols = run_detailed_analysis( - target_elf, objdump_path, readelf_path - ) - pr_analysis, pr_symbols = run_detailed_analysis( - pr_elf, objdump_path, readelf_path - ) - - if target_analysis and pr_analysis: - component_breakdown = create_detailed_breakdown_table( - target_analysis, pr_analysis - ) if target_symbols and pr_symbols: symbol_changes = create_symbol_changes_table(target_symbols, pr_symbols) @@ -612,13 +517,13 @@ def main() -> int: parser.add_argument( "--pr-flash", type=int, required=True, help="PR branch flash usage" ) - parser.add_argument("--target-elf", help="Optional path to target branch ELF file") - parser.add_argument("--pr-elf", help="Optional path to PR branch ELF file") parser.add_argument( - "--objdump-path", help="Optional path to objdump tool for detailed analysis" + "--target-json", + help="Optional path to target branch analysis JSON (for detailed analysis)", ) parser.add_argument( - "--readelf-path", help="Optional path to readelf tool for detailed analysis" + "--pr-json", + help="Optional path to PR branch analysis JSON (for detailed analysis)", ) args = parser.parse_args() @@ -633,17 +538,26 @@ def main() -> int: print(f"Error parsing --components JSON: {e}", file=sys.stderr) sys.exit(1) - # Detect platform-specific toolchain paths - objdump_path = args.objdump_path - readelf_path = args.readelf_path + # Load analysis JSON files + target_analysis = None + pr_analysis = None + target_symbols = None + pr_symbols = None - if not objdump_path or not readelf_path: - # Auto-detect based on platform - objdump_path, readelf_path = get_platform_toolchain(args.platform) + if args.target_json: + target_data = load_analysis_json(args.target_json) + if target_data and target_data.get("detailed_analysis"): + target_analysis = target_data["detailed_analysis"].get("components") + target_symbols = target_data["detailed_analysis"].get("symbols") + + if args.pr_json: + pr_data = load_analysis_json(args.pr_json) + if pr_data and pr_data.get("detailed_analysis"): + pr_analysis = pr_data["detailed_analysis"].get("components") + pr_symbols = pr_data["detailed_analysis"].get("symbols") # Create comment body - # Note: ELF files (if provided) are from the final build when test_build_components - # runs multiple builds. 
Memory totals (RAM/Flash) are already summed across all builds. + # Note: Memory totals (RAM/Flash) are summed across all builds if multiple were run. comment_body = create_comment_body( components=components, platform=args.platform, @@ -651,10 +565,10 @@ def main() -> int: target_flash=args.target_flash, pr_ram=args.pr_ram, pr_flash=args.pr_flash, - target_elf=args.target_elf, - pr_elf=args.pr_elf, - objdump_path=objdump_path, - readelf_path=readelf_path, + target_analysis=target_analysis, + pr_analysis=pr_analysis, + target_symbols=target_symbols, + pr_symbols=pr_symbols, ) # Post or update comment diff --git a/script/ci_memory_impact_extract.py b/script/ci_memory_impact_extract.py index 1b8a994f14..283b521860 100755 --- a/script/ci_memory_impact_extract.py +++ b/script/ci_memory_impact_extract.py @@ -9,11 +9,14 @@ The script reads compile output from stdin and looks for the standard PlatformIO output format: RAM: [==== ] 36.1% (used 29548 bytes from 81920 bytes) Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes) + +Optionally performs detailed memory analysis if a build directory is provided. """ from __future__ import annotations import argparse +import json from pathlib import Path import re import sys @@ -60,6 +63,87 @@ def extract_from_compile_output(output_text: str) -> tuple[int | None, int | Non return total_ram, total_flash +def run_detailed_analysis(build_dir: str) -> dict | None: + """Run detailed memory analysis on build directory. + + Args: + build_dir: Path to ESPHome build directory + + Returns: + Dictionary with analysis results or None if analysis fails + """ + from esphome.analyze_memory import MemoryAnalyzer + from esphome.platformio_api import IDEData + + build_path = Path(build_dir) + if not build_path.exists(): + print(f"Build directory not found: {build_dir}", file=sys.stderr) + return None + + # Find firmware.elf + elf_path = None + for elf_candidate in [ + build_path / "firmware.elf", + build_path / ".pioenvs" / build_path.name / "firmware.elf", + ]: + if elf_candidate.exists(): + elf_path = str(elf_candidate) + break + + if not elf_path: + print(f"firmware.elf not found in {build_dir}", file=sys.stderr) + return None + + # Find idedata.json + device_name = build_path.name + idedata_path = Path.home() / ".esphome" / "idedata" / f"{device_name}.json" + + idedata = None + if idedata_path.exists(): + try: + with open(idedata_path, encoding="utf-8") as f: + raw_data = json.load(f) + idedata = IDEData(raw_data) + except (json.JSONDecodeError, OSError) as e: + print(f"Warning: Failed to load idedata: {e}", file=sys.stderr) + + try: + analyzer = MemoryAnalyzer(elf_path, idedata=idedata) + components = analyzer.analyze() + + # Convert to JSON-serializable format + result = { + "components": {}, + "symbols": {}, + } + + for name, mem in components.items(): + result["components"][name] = { + "text": mem.text_size, + "rodata": mem.rodata_size, + "data": mem.data_size, + "bss": mem.bss_size, + "flash_total": mem.flash_total, + "ram_total": mem.ram_total, + "symbol_count": mem.symbol_count, + } + + # Build symbol map + for section in analyzer.sections.values(): + for symbol_name, size, _ in section.symbols: + if size > 0: + demangled = analyzer._demangle_symbol(symbol_name) + result["symbols"][demangled] = size + + return result + except Exception as e: + print(f"Warning: Failed to run detailed analysis: {e}", file=sys.stderr) + import traceback + + traceback.print_exc(file=sys.stderr) + return None + + def main() -> int: """Main entry point.""" parser = 
argparse.ArgumentParser( @@ -70,6 +154,14 @@ def main() -> int: action="store_true", help="Output to GITHUB_OUTPUT environment file", ) + parser.add_argument( + "--build-dir", + help="Optional build directory for detailed memory analysis", + ) + parser.add_argument( + "--output-json", + help="Optional path to save detailed analysis JSON", + ) args = parser.parse_args() @@ -108,6 +200,26 @@ def main() -> int: print(f"Total RAM: {ram_bytes} bytes", file=sys.stderr) print(f"Total Flash: {flash_bytes} bytes", file=sys.stderr) + # Run detailed analysis if build directory provided + detailed_analysis = None + if args.build_dir: + print(f"Running detailed analysis on {args.build_dir}", file=sys.stderr) + detailed_analysis = run_detailed_analysis(args.build_dir) + + # Save JSON output if requested + if args.output_json: + output_data = { + "ram_bytes": ram_bytes, + "flash_bytes": flash_bytes, + "detailed_analysis": detailed_analysis, + } + + output_path = Path(args.output_json) + output_path.parent.mkdir(parents=True, exist_ok=True) + with open(output_path, "w", encoding="utf-8") as f: + json.dump(output_data, f, indent=2) + print(f"Saved analysis to {args.output_json}", file=sys.stderr) + if args.output_env: # Output to GitHub Actions write_github_output( From e2101f5a20bd99d105321f2ad83f3d5b89a57d08 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 14:52:07 -1000 Subject: [PATCH 035/336] tweak --- .github/workflows/ci.yml | 56 ++++++------------- script/ci_memory_impact_extract.py | 87 ++++++++++++++++++++++-------- 2 files changed, 82 insertions(+), 61 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7a4d8bf929..440f64298b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -564,26 +564,14 @@ jobs: echo "Compiling with test_build_components.py..." - # Find most recent build directory for detailed analysis - build_dir=$(find ~/.esphome/build -type d -maxdepth 1 -mindepth 1 -printf '%T@ %p\n' 2>/dev/null | sort -rn | head -1 | cut -d' ' -f2- || echo "") - - # Run build and extract memory, with optional detailed analysis - if [ -n "$build_dir" ]; then - python script/test_build_components.py \ - -e compile \ - -c "$component_list" \ - -t "$platform" 2>&1 | \ - python script/ci_memory_impact_extract.py \ - --output-env \ - --build-dir "$build_dir" \ - --output-json memory-analysis-target.json - else - python script/test_build_components.py \ - -e compile \ - -c "$component_list" \ - -t "$platform" 2>&1 | \ - python script/ci_memory_impact_extract.py --output-env - fi + # Run build and extract memory with auto-detection of build directory for detailed analysis + python script/test_build_components.py \ + -e compile \ + -c "$component_list" \ + -t "$platform" 2>&1 | \ + python script/ci_memory_impact_extract.py \ + --output-env \ + --output-json memory-analysis-target.json - name: Upload memory analysis JSON uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: @@ -631,26 +619,14 @@ jobs: echo "Compiling with test_build_components.py..." 
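
(Note: the --output-env flag used here relies on write_github_output() from script/ci_helpers.py, whose body is not reproduced in this excerpt. GitHub Actions step outputs such as ram_usage and flash_usage are published by appending key=value lines to the file named by $GITHUB_OUTPUT; a minimal sketch of such a helper, with an assumed dict-based signature, could look like the following.)

# Sketch only: the real helper lives in script/ci_helpers.py and its exact
# signature is not shown in this patch; the dict-based interface is assumed.
import os

def write_github_output(values: dict[str, str | int]) -> None:
    """Append key=value lines to $GITHUB_OUTPUT so later jobs can read them
    as steps.<step_id>.outputs.<key>."""
    output_file = os.environ.get("GITHUB_OUTPUT")
    if not output_file:
        # Outside GitHub Actions: print for local debugging instead.
        for key, value in values.items():
            print(f"{key}={value}")
        return
    with open(output_file, "a", encoding="utf-8") as f:
        for key, value in values.items():
            f.write(f"{key}={value}\n")
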
- # Find most recent build directory for detailed analysis - build_dir=$(find ~/.esphome/build -type d -maxdepth 1 -mindepth 1 -printf '%T@ %p\n' 2>/dev/null | sort -rn | head -1 | cut -d' ' -f2- || echo "") - - # Run build and extract memory, with optional detailed analysis - if [ -n "$build_dir" ]; then - python script/test_build_components.py \ - -e compile \ - -c "$component_list" \ - -t "$platform" 2>&1 | \ - python script/ci_memory_impact_extract.py \ - --output-env \ - --build-dir "$build_dir" \ - --output-json memory-analysis-pr.json - else - python script/test_build_components.py \ - -e compile \ - -c "$component_list" \ - -t "$platform" 2>&1 | \ - python script/ci_memory_impact_extract.py --output-env - fi + # Run build and extract memory with auto-detection of build directory for detailed analysis + python script/test_build_components.py \ + -e compile \ + -c "$component_list" \ + -t "$platform" 2>&1 | \ + python script/ci_memory_impact_extract.py \ + --output-env \ + --output-json memory-analysis-pr.json - name: Upload memory analysis JSON uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: diff --git a/script/ci_memory_impact_extract.py b/script/ci_memory_impact_extract.py index 283b521860..9a9c294f2e 100755 --- a/script/ci_memory_impact_extract.py +++ b/script/ci_memory_impact_extract.py @@ -28,8 +28,10 @@ sys.path.insert(0, str(Path(__file__).parent.parent)) from script.ci_helpers import write_github_output -def extract_from_compile_output(output_text: str) -> tuple[int | None, int | None]: - """Extract memory usage from PlatformIO compile output. +def extract_from_compile_output( + output_text: str, +) -> tuple[int | None, int | None, str | None]: + """Extract memory usage and build directory from PlatformIO compile output. Supports multiple builds (for component groups or isolated components). 
When test_build_components.py creates multiple builds, this sums the @@ -39,11 +41,14 @@ def extract_from_compile_output(output_text: str) -> tuple[int | None, int | Non RAM: [==== ] 36.1% (used 29548 bytes from 81920 bytes) Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes) + Also extracts build directory from lines like: + INFO Deleting /path/to/build/.esphome/build/componenttestesp8266ard/.pioenvs + Args: output_text: Compile output text (may contain multiple builds) Returns: - Tuple of (total_ram_bytes, total_flash_bytes) or (None, None) if not found + Tuple of (total_ram_bytes, total_flash_bytes, build_dir) or (None, None, None) if not found """ # Find all RAM and Flash matches (may be multiple builds) ram_matches = re.findall( @@ -54,13 +59,21 @@ def extract_from_compile_output(output_text: str) -> tuple[int | None, int | Non ) if not ram_matches or not flash_matches: - return None, None + return None, None, None # Sum all builds (handles multiple component groups) total_ram = sum(int(match) for match in ram_matches) total_flash = sum(int(match) for match in flash_matches) - return total_ram, total_flash + # Extract build directory from ESPHome's delete messages + # Look for: INFO Deleting /path/to/build/.esphome/build/componenttest.../.pioenvs + build_dir = None + if match := re.search( + r"INFO Deleting (.+/\.esphome/build/componenttest[^/]+)/\.pioenvs", output_text + ): + build_dir = match.group(1) + + return total_ram, total_flash, build_dir def run_detailed_analysis(build_dir: str) -> dict | None: @@ -94,18 +107,31 @@ def run_detailed_analysis(build_dir: str) -> dict | None: print(f"firmware.elf not found in {build_dir}", file=sys.stderr) return None - # Find idedata.json + # Find idedata.json - check multiple locations device_name = build_path.name - idedata_path = Path.home() / ".esphome" / "idedata" / f"{device_name}.json" + idedata_candidates = [ + # In .pioenvs for test builds + build_path / ".pioenvs" / device_name / "idedata.json", + # In .esphome/idedata for regular builds + Path.home() / ".esphome" / "idedata" / f"{device_name}.json", + # Check parent directories for .esphome/idedata (for test_build_components) + build_path.parent.parent.parent / "idedata" / f"{device_name}.json", + ] idedata = None - if idedata_path.exists(): - try: - with open(idedata_path, encoding="utf-8") as f: - raw_data = json.load(f) - idedata = IDEData(raw_data) - except (json.JSONDecodeError, OSError) as e: - print(f"Warning: Failed to load idedata: {e}", file=sys.stderr) + for idedata_path in idedata_candidates: + if idedata_path.exists(): + try: + with open(idedata_path, encoding="utf-8") as f: + raw_data = json.load(f) + idedata = IDEData(raw_data) + print(f"Loaded idedata from: {idedata_path}", file=sys.stderr) + break + except (json.JSONDecodeError, OSError) as e: + print( + f"Warning: Failed to load idedata from {idedata_path}: {e}", + file=sys.stderr, + ) try: analyzer = MemoryAnalyzer(elf_path, idedata=idedata) @@ -156,20 +182,26 @@ def main() -> int: ) parser.add_argument( "--build-dir", - help="Optional build directory for detailed memory analysis", + help="Optional build directory for detailed memory analysis (overrides auto-detection)", ) parser.add_argument( "--output-json", help="Optional path to save detailed analysis JSON", ) + parser.add_argument( + "--output-build-dir", + help="Optional path to write the detected build directory", + ) args = parser.parse_args() # Read compile output from stdin compile_output = sys.stdin.read() - # Extract memory usage - ram_bytes, 
flash_bytes = extract_from_compile_output(compile_output) + # Extract memory usage and build directory + ram_bytes, flash_bytes, detected_build_dir = extract_from_compile_output( + compile_output + ) if ram_bytes is None or flash_bytes is None: print("Failed to extract memory usage from compile output", file=sys.stderr) @@ -200,11 +232,24 @@ def main() -> int: print(f"Total RAM: {ram_bytes} bytes", file=sys.stderr) print(f"Total Flash: {flash_bytes} bytes", file=sys.stderr) - # Run detailed analysis if build directory provided + # Determine which build directory to use (explicit arg overrides auto-detection) + build_dir = args.build_dir or detected_build_dir + + if detected_build_dir: + print(f"Detected build directory: {detected_build_dir}", file=sys.stderr) + + # Write build directory to file if requested + if args.output_build_dir and build_dir: + build_dir_path = Path(args.output_build_dir) + build_dir_path.parent.mkdir(parents=True, exist_ok=True) + build_dir_path.write_text(build_dir) + print(f"Wrote build directory to {args.output_build_dir}", file=sys.stderr) + + # Run detailed analysis if build directory available detailed_analysis = None - if args.build_dir: - print(f"Running detailed analysis on {args.build_dir}", file=sys.stderr) - detailed_analysis = run_detailed_analysis(args.build_dir) + if build_dir: + print(f"Running detailed analysis on {build_dir}", file=sys.stderr) + detailed_analysis = run_detailed_analysis(build_dir) # Save JSON output if requested if args.output_json: From b0ada914bcf19b86699e61019e1977f5ea9d647d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 14:57:45 -1000 Subject: [PATCH 036/336] tweak --- esphome/__main__.py | 4 +++- script/ci_memory_impact_extract.py | 10 ++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/esphome/__main__.py b/esphome/__main__.py index d9bdfb175b..a0b7d16ae9 100644 --- a/esphome/__main__.py +++ b/esphome/__main__.py @@ -466,7 +466,9 @@ def write_cpp_file() -> int: def compile_program(args: ArgsProtocol, config: ConfigType) -> int: from esphome import platformio_api - _LOGGER.info("Compiling app...") + # NOTE: "Build path:" format is parsed by script/ci_memory_impact_extract.py + # If you change this format, update the regex in that script as well + _LOGGER.info("Compiling app... Build path: %s", CORE.build_path) rc = platformio_api.run_compile(config, CORE.verbose) if rc != 0: return rc diff --git a/script/ci_memory_impact_extract.py b/script/ci_memory_impact_extract.py index 9a9c294f2e..97f3750950 100755 --- a/script/ci_memory_impact_extract.py +++ b/script/ci_memory_impact_extract.py @@ -65,13 +65,11 @@ def extract_from_compile_output( total_ram = sum(int(match) for match in ram_matches) total_flash = sum(int(match) for match in flash_matches) - # Extract build directory from ESPHome's delete messages - # Look for: INFO Deleting /path/to/build/.esphome/build/componenttest.../.pioenvs + # Extract build directory from ESPHome's explicit build path output + # Look for: INFO Compiling app... Build path: /path/to/build build_dir = None - if match := re.search( - r"INFO Deleting (.+/\.esphome/build/componenttest[^/]+)/\.pioenvs", output_text - ): - build_dir = match.group(1) + if match := re.search(r"Build path: (.+)", output_text): + build_dir = match.group(1).strip() return total_ram, total_flash, build_dir From e1e047c53fd5a8cf90d16ade711fb81a9360017d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 15:02:09 -1000 Subject: [PATCH 037/336] tweak --- .github/workflows/ci.yml | 4 ++ esphome/platformio_api.py | 82 +----------------------------- script/ci_memory_impact_extract.py | 24 ++++----- script/determine-jobs.py | 22 ++++---- 4 files changed, 29 insertions(+), 103 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 440f64298b..0935fe609c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -565,10 +565,12 @@ jobs: echo "Compiling with test_build_components.py..." # Run build and extract memory with auto-detection of build directory for detailed analysis + # Use tee to show output in CI while also piping to extraction script python script/test_build_components.py \ -e compile \ -c "$component_list" \ -t "$platform" 2>&1 | \ + tee /dev/stderr | \ python script/ci_memory_impact_extract.py \ --output-env \ --output-json memory-analysis-target.json @@ -620,10 +622,12 @@ jobs: echo "Compiling with test_build_components.py..." # Run build and extract memory with auto-detection of build directory for detailed analysis + # Use tee to show output in CI while also piping to extraction script python script/test_build_components.py \ -e compile \ -c "$component_list" \ -t "$platform" 2>&1 | \ + tee /dev/stderr | \ python script/ci_memory_impact_extract.py \ --output-env \ --output-json memory-analysis-pr.json diff --git a/esphome/platformio_api.py b/esphome/platformio_api.py index 065a8cf896..cc48562b4c 100644 --- a/esphome/platformio_api.py +++ b/esphome/platformio_api.py @@ -145,16 +145,7 @@ def run_compile(config, verbose): args = [] if CONF_COMPILE_PROCESS_LIMIT in config[CONF_ESPHOME]: args += [f"-j{config[CONF_ESPHOME][CONF_COMPILE_PROCESS_LIMIT]}"] - result = run_platformio_cli_run(config, verbose, *args) - - # Run memory analysis if enabled - if config.get(CONF_ESPHOME, {}).get("analyze_memory", False): - try: - analyze_memory_usage(config) - except Exception as e: - _LOGGER.warning("Failed to analyze memory usage: %s", e) - - return result + return run_platformio_cli_run(config, verbose, *args) def _run_idedata(config): @@ -403,74 +394,3 @@ class IDEData: return f"{self.cc_path[:-7]}readelf.exe" return f"{self.cc_path[:-3]}readelf" - - -def analyze_memory_usage(config: dict[str, Any]) -> None: - """Analyze memory usage by component after compilation.""" - # Lazy import to avoid overhead when not needed - from esphome.analyze_memory import MemoryAnalyzer - - idedata = get_idedata(config) - - # Get ELF path - elf_path = idedata.firmware_elf_path - - # Debug logging - _LOGGER.debug("ELF path from idedata: %s", elf_path) - - # Check if file exists - if not Path(elf_path).exists(): - # Try alternate path - alt_path = Path(CORE.relative_build_path(".pioenvs", CORE.name, "firmware.elf")) - if alt_path.exists(): - elf_path = str(alt_path) - _LOGGER.debug("Using alternate ELF path: %s", elf_path) - else: - _LOGGER.warning("ELF file not found at %s or %s", elf_path, alt_path) - return - - # Extract external components from config - external_components = set() - - # Get the list of built-in ESPHome components - from esphome.analyze_memory import get_esphome_components - - builtin_components = get_esphome_components() - - # Special non-component keys that appear in configs - NON_COMPONENT_KEYS = { - CONF_ESPHOME, - "substitutions", - "packages", - "globals", - "<<", - } - - # Check all top-level keys in config - for key in config: - if key not in builtin_components and key not in NON_COMPONENT_KEYS: - # This is 
an external component - external_components.add(key) - - _LOGGER.debug("Detected external components: %s", external_components) - - # Create analyzer and run analysis - # Pass idedata to auto-detect toolchain paths - analyzer = MemoryAnalyzer( - elf_path, external_components=external_components, idedata=idedata - ) - analyzer.analyze() - - # Generate and print report - report = analyzer.generate_report() - _LOGGER.info("\n%s", report) - - # Optionally save to file - if config.get(CONF_ESPHOME, {}).get("memory_report_file"): - report_file = Path(config[CONF_ESPHOME]["memory_report_file"]) - if report_file.suffix == ".json": - report_file.write_text(analyzer.to_json()) - _LOGGER.info("Memory report saved to %s", report_file) - else: - report_file.write_text(report) - _LOGGER.info("Memory report saved to %s", report_file) diff --git a/script/ci_memory_impact_extract.py b/script/ci_memory_impact_extract.py index 97f3750950..7b722fcfd4 100755 --- a/script/ci_memory_impact_extract.py +++ b/script/ci_memory_impact_extract.py @@ -137,21 +137,21 @@ def run_detailed_analysis(build_dir: str) -> dict | None: # Convert to JSON-serializable format result = { - "components": {}, + "components": { + name: { + "text": mem.text_size, + "rodata": mem.rodata_size, + "data": mem.data_size, + "bss": mem.bss_size, + "flash_total": mem.flash_total, + "ram_total": mem.ram_total, + "symbol_count": mem.symbol_count, + } + for name, mem in components.items() + }, "symbols": {}, } - for name, mem in components.items(): - result["components"][name] = { - "text": mem.text_size, - "rodata": mem.rodata_size, - "data": mem.data_size, - "bss": mem.bss_size, - "flash_total": mem.flash_total, - "ram_total": mem.ram_total, - "symbol_count": mem.symbol_count, - } - # Build symbol map for section in analyzer.sections.values(): for symbol_name, size, _ in section.symbols: diff --git a/script/determine-jobs.py b/script/determine-jobs.py index 56de0e77ba..bd21926c53 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -303,16 +303,18 @@ def detect_memory_impact_config( # Check if component has tests for any preferred platform for test_file in test_files: parts = test_file.stem.split(".") - if len(parts) >= 2: - platform = parts[1] - if platform in PLATFORM_PREFERENCE: - components_with_tests.append(component) - # Select the most preferred platform across all components - if selected_platform is None or PLATFORM_PREFERENCE.index( - platform - ) < PLATFORM_PREFERENCE.index(selected_platform): - selected_platform = platform - break + if len(parts) < 2: + continue + platform = parts[1] + if platform not in PLATFORM_PREFERENCE: + continue + components_with_tests.append(component) + # Select the most preferred platform across all components + if selected_platform is None or PLATFORM_PREFERENCE.index( + platform + ) < PLATFORM_PREFERENCE.index(selected_platform): + selected_platform = platform + break # If no components have tests, don't run memory impact if not components_with_tests: From 84316d62f9478ecb022fff58cdb6fd1cb16c6d55 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 15:04:19 -1000 Subject: [PATCH 038/336] tweak --- esphome/analyze_memory/__init__.py | 160 +++++++++-------------------- 1 file changed, 48 insertions(+), 112 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 5bd46fd01e..f2a2628ad8 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -29,59 +29,6 @@ if TYPE_CHECKING: _LOGGER = logging.getLogger(__name__) -def get_toolchain_for_platform(platform: str) -> tuple[str | None, str | None]: - """Get objdump and readelf paths for a given platform. - - This function auto-detects the correct toolchain based on the platform name, - using the same detection logic as PlatformIO's IDEData class. - - Args: - platform: Platform name (e.g., "esp8266-ard", "esp32-idf", "esp32-c3-idf") - - Returns: - Tuple of (objdump_path, readelf_path) or (None, None) if not found/supported - """ - home = Path.home() - platformio_packages = home / ".platformio" / "packages" - - # Map platform to toolchain and prefix (same logic as PlatformIO uses) - toolchain = None - prefix = None - - if "esp8266" in platform: - toolchain = "toolchain-xtensa" - prefix = "xtensa-lx106-elf" - elif "esp32-c" in platform or "esp32-h" in platform or "esp32-p4" in platform: - # RISC-V variants (C2, C3, C5, C6, H2, P4) - toolchain = "toolchain-riscv32-esp" - prefix = "riscv32-esp-elf" - elif "esp32" in platform: - # Xtensa variants (original, S2, S3) - toolchain = "toolchain-xtensa-esp-elf" - if "s2" in platform: - prefix = "xtensa-esp32s2-elf" - elif "s3" in platform: - prefix = "xtensa-esp32s3-elf" - else: - prefix = "xtensa-esp32-elf" - else: - # Other platforms (RP2040, LibreTiny, etc.) - not supported for ELF analysis - _LOGGER.debug("Platform %s not supported for ELF analysis", platform) - return None, None - - # Construct paths (same pattern as IDEData.objdump_path/readelf_path) - toolchain_path = platformio_packages / toolchain / "bin" - objdump_path = toolchain_path / f"{prefix}-objdump" - readelf_path = toolchain_path / f"{prefix}-readelf" - - if objdump_path.exists() and readelf_path.exists(): - _LOGGER.debug("Found %s toolchain: %s", platform, prefix) - return str(objdump_path), str(readelf_path) - - _LOGGER.warning("Toolchain not found at %s", toolchain_path) - return None, None - - @dataclass class MemorySection: """Represents a memory section with its symbols.""" @@ -171,71 +118,61 @@ class MemoryAnalyzer: def _parse_sections(self) -> None: """Parse section headers from ELF file.""" - try: - result = subprocess.run( - [self.readelf_path, "-S", str(self.elf_path)], - capture_output=True, - text=True, - check=True, - ) + result = subprocess.run( + [self.readelf_path, "-S", str(self.elf_path)], + capture_output=True, + text=True, + check=True, + ) - # Parse section headers - for line in result.stdout.splitlines(): - # Look for section entries - if not ( - match := re.match( - r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)", - line, - ) - ): - continue + # Parse section headers + for line in result.stdout.splitlines(): + # Look for section entries + if not ( + match := re.match( + r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)", + line, + ) + ): + continue - section_name = match.group(1) - size_hex = match.group(2) - size = int(size_hex, 16) + section_name = match.group(1) + size_hex = match.group(2) + size = int(size_hex, 16) - # Map to standard section name - mapped_section = 
map_section_name(section_name) - if not mapped_section: - continue + # Map to standard section name + mapped_section = map_section_name(section_name) + if not mapped_section: + continue - if mapped_section not in self.sections: - self.sections[mapped_section] = MemorySection(mapped_section) - self.sections[mapped_section].total_size += size - - except subprocess.CalledProcessError as e: - _LOGGER.error("Failed to parse sections: %s", e) - raise + if mapped_section not in self.sections: + self.sections[mapped_section] = MemorySection(mapped_section) + self.sections[mapped_section].total_size += size def _parse_symbols(self) -> None: """Parse symbols from ELF file.""" - try: - result = subprocess.run( - [self.objdump_path, "-t", str(self.elf_path)], - capture_output=True, - text=True, - check=True, - ) + result = subprocess.run( + [self.objdump_path, "-t", str(self.elf_path)], + capture_output=True, + text=True, + check=True, + ) - # Track seen addresses to avoid duplicates - seen_addresses: set[str] = set() + # Track seen addresses to avoid duplicates + seen_addresses: set[str] = set() - for line in result.stdout.splitlines(): - if not (symbol_info := parse_symbol_line(line)): - continue + for line in result.stdout.splitlines(): + if not (symbol_info := parse_symbol_line(line)): + continue - section, name, size, address = symbol_info + section, name, size, address = symbol_info - # Skip duplicate symbols at the same address (e.g., C1/C2 constructors) - if address in seen_addresses or section not in self.sections: - continue + # Skip duplicate symbols at the same address (e.g., C1/C2 constructors) + if address in seen_addresses or section not in self.sections: + continue - self.sections[section].symbols.append((name, size, "")) - seen_addresses.add(address) - - except subprocess.CalledProcessError as e: - _LOGGER.error("Failed to parse symbols: %s", e) - raise + self.sections[section].symbols.append((name, size, "")) + seen_addresses.add(address) def _categorize_symbols(self) -> None: """Categorize symbols by component.""" @@ -373,15 +310,14 @@ class MemoryAnalyzer: # Map original to demangled names for original, demangled in zip(symbols, demangled_lines): self._demangle_cache[original] = demangled - else: - # If batch fails, cache originals - for symbol in symbols: - self._demangle_cache[symbol] = symbol + return except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e: # On error, cache originals _LOGGER.debug("Failed to batch demangle symbols: %s", e) - for symbol in symbols: - self._demangle_cache[symbol] = symbol + + # If demangling failed, cache originals + for symbol in symbols: + self._demangle_cache[symbol] = symbol def _demangle_symbol(self, symbol: str) -> str: """Get demangled C++ symbol name from cache.""" From 95a0c9594f3f94bdcb57ac173f6b6ffb49ed8d2b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 15:12:36 -1000 Subject: [PATCH 039/336] tweak --- script/ci_memory_impact_comment.py | 6 +++--- script/ci_memory_impact_extract.py | 10 ++++++++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index c5eb9e701f..140bd2f08e 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -238,7 +238,7 @@ def create_detailed_breakdown_table( # Combine all components from both analyses all_components = set(target_analysis.keys()) | set(pr_analysis.keys()) - # Filter to components that have changed or are significant + # Filter to components that have changed changed_components = [] for comp in all_components: target_mem = target_analysis.get(comp, {}) @@ -247,8 +247,8 @@ def create_detailed_breakdown_table( target_flash = target_mem.get("flash_total", 0) pr_flash = pr_mem.get("flash_total", 0) - # Include if component has changed or is significant (> 1KB) - if target_flash != pr_flash or target_flash > 1024 or pr_flash > 1024: + # Only include if component has changed + if target_flash != pr_flash: delta = pr_flash - target_flash changed_components.append((comp, target_flash, pr_flash, delta)) diff --git a/script/ci_memory_impact_extract.py b/script/ci_memory_impact_extract.py index 7b722fcfd4..96f947e12a 100755 --- a/script/ci_memory_impact_extract.py +++ b/script/ci_memory_impact_extract.py @@ -67,6 +67,7 @@ def extract_from_compile_output( # Extract build directory from ESPHome's explicit build path output # Look for: INFO Compiling app... Build path: /path/to/build + # Note: Multiple builds reuse the same build path (each overwrites the previous) build_dir = None if match := re.search(r"Build path: (.+)", output_text): build_dir = match.group(1).strip() @@ -226,6 +227,10 @@ def main() -> int: f"Found {num_builds} builds - summing memory usage across all builds", file=sys.stderr, ) + print( + "WARNING: Detailed analysis will only cover the last build", + file=sys.stderr, + ) print(f"Total RAM: {ram_bytes} bytes", file=sys.stderr) print(f"Total Flash: {flash_bytes} bytes", file=sys.stderr) @@ -235,6 +240,11 @@ def main() -> int: if detected_build_dir: print(f"Detected build directory: {detected_build_dir}", file=sys.stderr) + if num_builds > 1: + print( + f" (using last of {num_builds} builds for detailed analysis)", + file=sys.stderr, + ) # Write build directory to file if requested if args.output_build_dir and build_dir: From a9e5e4d6d223785117a4facee0ad73f8d9118b52 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 15:14:00 -1000 Subject: [PATCH 040/336] tweak --- script/ci_memory_impact_comment.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 140bd2f08e..2b747629d5 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -157,8 +157,14 @@ def create_symbol_changes_table( target_str = format_bytes(target_size) pr_str = format_bytes(pr_size) change_str = format_change(target_size, pr_size) - # Truncate very long symbol names - display_symbol = symbol if len(symbol) <= 80 else symbol[:77] + "..." + # Truncate very long symbol names but show full name in title attribute + if len(symbol) <= 100: + display_symbol = symbol + else: + # Use HTML details for very long symbols + display_symbol = ( + f"
<details><summary>{symbol[:97]}...</summary>{symbol}</details>
" + ) lines.append( f"| `{display_symbol}` | {target_str} | {pr_str} | {change_str} |" ) @@ -261,8 +267,8 @@ def create_detailed_breakdown_table( # Build table - limit to top 20 changes lines = [ "", - "
", - "📊 Component Memory Breakdown (click to expand)", + "
", + "📊 Component Memory Breakdown", "", "| Component | Target Flash | PR Flash | Change |", "|-----------|--------------|----------|--------|", From 62ce39e4307d8c7b085012e4e72b4586dac8ddf9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 15:17:15 -1000 Subject: [PATCH 041/336] fix --- esphome/analyze_memory/__init__.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index f2a2628ad8..11e5933911 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -295,6 +295,14 @@ class MemoryAnalyzer: potential_cppfilt = self.objdump_path.replace("objdump", "c++filt") if Path(potential_cppfilt).exists(): cppfilt_cmd = potential_cppfilt + _LOGGER.warning("Using toolchain c++filt: %s", cppfilt_cmd) + else: + _LOGGER.warning( + "Toolchain c++filt not found at %s, using system c++filt", + potential_cppfilt, + ) + else: + _LOGGER.warning("Using system c++filt (objdump_path=%s)", self.objdump_path) try: # Send all symbols to c++filt at once @@ -310,6 +318,9 @@ class MemoryAnalyzer: # Map original to demangled names for original, demangled in zip(symbols, demangled_lines): self._demangle_cache[original] = demangled + # Log symbols that failed to demangle (stayed the same) + if original == demangled and original.startswith("_Z"): + _LOGGER.debug("Failed to demangle symbol: %s", original[:100]) return except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e: # On error, cache originals From daa03e5b3c70b39029f01589e1a7b996561d1513 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 15:17:28 -1000 Subject: [PATCH 042/336] fix --- esphome/analyze_memory/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 11e5933911..af1aee66c8 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -320,7 +320,7 @@ class MemoryAnalyzer: self._demangle_cache[original] = demangled # Log symbols that failed to demangle (stayed the same) if original == demangled and original.startswith("_Z"): - _LOGGER.debug("Failed to demangle symbol: %s", original[:100]) + _LOGGER.debug("Failed to demangle symbol: %s", original) return except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e: # On error, cache originals From 3bc0041b948d6e97d1491c3917412e658821985a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 15:22:06 -1000 Subject: [PATCH 043/336] fix --- script/test_build_components.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/script/test_build_components.py b/script/test_build_components.py index df092c091d..07f2680799 100755 --- a/script/test_build_components.py +++ b/script/test_build_components.py @@ -82,13 +82,14 @@ def show_disk_space_if_ci(esphome_command: str) -> None: def find_component_tests( - components_dir: Path, component_pattern: str = "*" + components_dir: Path, component_pattern: str = "*", base_only: bool = False ) -> dict[str, list[Path]]: """Find all component test files. 
Args: components_dir: Path to tests/components directory component_pattern: Glob pattern for component names + base_only: If True, only find base test files (test.*.yaml), not variant files (test-*.yaml) Returns: Dictionary mapping component name to list of test files @@ -99,8 +100,9 @@ def find_component_tests( if not comp_dir.is_dir(): continue - # Find test files matching test.*.yaml or test-*.yaml patterns - for test_file in comp_dir.glob("test[.-]*.yaml"): + # Find test files - either base only (test.*.yaml) or all (test[.-]*.yaml) + pattern = "test.*.yaml" if base_only else "test[.-]*.yaml" + for test_file in comp_dir.glob(pattern): component_tests[comp_dir.name].append(test_file) return dict(component_tests) @@ -931,6 +933,7 @@ def test_components( continue_on_fail: bool, enable_grouping: bool = True, isolated_components: set[str] | None = None, + base_only: bool = False, ) -> int: """Test components with optional intelligent grouping. @@ -944,6 +947,7 @@ def test_components( These are tested WITHOUT --testing-mode to enable full validation (pin conflicts, etc). This is used in CI for directly changed components to catch issues that would be missed with --testing-mode. + base_only: If True, only test base test files (test.*.yaml), not variant files (test-*.yaml) Returns: Exit code (0 for success, 1 for failure) @@ -961,7 +965,7 @@ def test_components( # Find all component tests all_tests = {} for pattern in component_patterns: - all_tests.update(find_component_tests(tests_dir, pattern)) + all_tests.update(find_component_tests(tests_dir, pattern, base_only)) if not all_tests: print(f"No components found matching: {component_patterns}") @@ -1122,6 +1126,11 @@ def main() -> int: "These are tested WITHOUT --testing-mode to enable full validation. " "Used in CI for directly changed components to catch pin conflicts and other issues.", ) + parser.add_argument( + "--base-only", + action="store_true", + help="Only test base test files (test.*.yaml), not variant files (test-*.yaml)", + ) args = parser.parse_args() @@ -1140,6 +1149,7 @@ def main() -> int: continue_on_fail=args.continue_on_fail, enable_grouping=not args.no_grouping, isolated_components=isolated_components, + base_only=args.base_only, ) From 5e9b97283188df8377ec0cf692a397329b950e31 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 15:24:49 -1000 Subject: [PATCH 044/336] fix --- .github/workflows/ci.yml | 94 +++++++++++++++++++++++++++++- script/ci_memory_impact_comment.py | 16 ++++- 2 files changed, 106 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0935fe609c..74ba831bc4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -533,23 +533,79 @@ jobs: outputs: ram_usage: ${{ steps.extract.outputs.ram_usage }} flash_usage: ${{ steps.extract.outputs.flash_usage }} + cache_hit: ${{ steps.cache-memory-analysis.outputs.cache-hit }} steps: - name: Check out target branch uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: ref: ${{ github.base_ref }} + + # Create cache key based on: + # 1. Target branch commit SHA + # 2. Hash of build infrastructure files (scripts and CI workflow) + # 3. Platform being tested + # 4. 
Component list + - name: Generate cache key + id: cache-key + run: | + # Get the commit SHA of the target branch + target_sha=$(git rev-parse HEAD) + + # Hash the build infrastructure files (all files that affect build/analysis) + infra_hash=$(cat \ + script/test_build_components.py \ + script/ci_memory_impact_extract.py \ + script/analyze_component_buses.py \ + script/merge_component_configs.py \ + script/ci_helpers.py \ + .github/workflows/ci.yml \ + | sha256sum | cut -d' ' -f1) + + # Get platform and components from job inputs + platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}" + components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}' + components_hash=$(echo "$components" | sha256sum | cut -d' ' -f1) + + # Combine into cache key + cache_key="memory-analysis-target-${target_sha}-${infra_hash}-${platform}-${components_hash}" + echo "cache-key=${cache_key}" >> $GITHUB_OUTPUT + echo "Cache key: ${cache_key}" + + # Try to restore cached analysis results + - name: Restore cached memory analysis + id: cache-memory-analysis + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: memory-analysis-target.json + key: ${{ steps.cache-key.outputs.cache-key }} + + - name: Cache status + run: | + if [ "${{ steps.cache-memory-analysis.outputs.cache-hit }}" == "true" ]; then + echo "✓ Cache hit! Using cached memory analysis results." + echo " Skipping build step to save time." + else + echo "✗ Cache miss. Will build and analyze memory usage." + fi + + # Only restore Python and build if cache miss - name: Restore Python + if: steps.cache-memory-analysis.outputs.cache-hit != 'true' uses: ./.github/actions/restore-python with: python-version: ${{ env.DEFAULT_PYTHON }} cache-key: ${{ needs.common.outputs.cache-key }} + - name: Cache platformio + if: steps.cache-memory-analysis.outputs.cache-hit != 'true' uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: path: ~/.platformio key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }} + - name: Build, compile, and analyze memory - id: extract + if: steps.cache-memory-analysis.outputs.cache-hit != 'true' + id: build run: | . 
venv/bin/activate components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}' @@ -574,12 +630,36 @@ jobs: python script/ci_memory_impact_extract.py \ --output-env \ --output-json memory-analysis-target.json + + # Save build results to cache for future runs + - name: Save memory analysis to cache + if: steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success' + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: memory-analysis-target.json + key: ${{ steps.cache-key.outputs.cache-key }} + + # Extract outputs from cached or freshly built analysis + - name: Extract memory usage for outputs + id: extract + run: | + if [ -f memory-analysis-target.json ]; then + ram=$(jq -r '.ram_bytes' memory-analysis-target.json) + flash=$(jq -r '.flash_bytes' memory-analysis-target.json) + echo "ram_usage=${ram}" >> $GITHUB_OUTPUT + echo "flash_usage=${flash}" >> $GITHUB_OUTPUT + echo "RAM: ${ram} bytes, Flash: ${flash} bytes" + else + echo "Error: memory-analysis-target.json not found" + exit 1 + fi + - name: Upload memory analysis JSON uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: memory-analysis-target path: memory-analysis-target.json - if-no-files-found: warn + if-no-files-found: error retention-days: 1 memory-impact-pr-branch: @@ -680,6 +760,7 @@ jobs: TARGET_FLASH: ${{ needs.memory-impact-target-branch.outputs.flash_usage }} PR_RAM: ${{ needs.memory-impact-pr-branch.outputs.ram_usage }} PR_FLASH: ${{ needs.memory-impact-pr-branch.outputs.flash_usage }} + TARGET_CACHE_HIT: ${{ needs.memory-impact-target-branch.outputs.cache_hit }} run: | . venv/bin/activate @@ -701,6 +782,12 @@ jobs: echo "No PR analysis JSON found" fi + # Add cache flag if target was cached + cache_flag="" + if [ "$TARGET_CACHE_HIT" == "true" ]; then + cache_flag="--target-cache-hit" + fi + python script/ci_memory_impact_comment.py \ --pr-number "${{ github.event.pull_request.number }}" \ --components "$COMPONENTS" \ @@ -710,7 +797,8 @@ jobs: --pr-ram "$PR_RAM" \ --pr-flash "$PR_FLASH" \ $target_json_arg \ - $pr_json_arg + $pr_json_arg \ + $cache_flag ci-status: name: CI Status diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 2b747629d5..055c2a9a96 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -301,6 +301,7 @@ def create_comment_body( pr_analysis: dict | None = None, target_symbols: dict | None = None, pr_symbols: dict | None = None, + target_cache_hit: bool = False, ) -> str: """Create the comment body with memory impact analysis. @@ -315,6 +316,7 @@ def create_comment_body( pr_analysis: Optional component breakdown for PR branch target_symbols: Optional symbol map for target branch pr_symbols: Optional symbol map for PR branch + target_cache_hit: Whether target branch analysis was loaded from cache Returns: Formatted comment body @@ -344,6 +346,11 @@ def create_comment_body( components_str = ", ".join(f"`{c}`" for c in sorted(components)) config_note = f"a merged configuration with {len(components)} components" + # Add cache info note if target was cached + cache_note = "" + if target_cache_hit: + cache_note = "\n\n> ⚡ Target branch analysis was loaded from cache (build skipped for faster CI)." 
+ return f"""{COMMENT_MARKER} ## Memory Impact Analysis @@ -354,7 +361,8 @@ def create_comment_body( |--------|--------------|---------|--------| | **RAM** | {format_bytes(target_ram)} | {format_bytes(pr_ram)} | {ram_change} | | **Flash** | {format_bytes(target_flash)} | {format_bytes(pr_flash)} | {flash_change} | -{component_breakdown}{symbol_changes} +{component_breakdown}{symbol_changes}{cache_note} + --- *This analysis runs automatically when components change. Memory usage is measured from {config_note}.* """ @@ -531,6 +539,11 @@ def main() -> int: "--pr-json", help="Optional path to PR branch analysis JSON (for detailed analysis)", ) + parser.add_argument( + "--target-cache-hit", + action="store_true", + help="Indicates that target branch analysis was loaded from cache", + ) args = parser.parse_args() @@ -575,6 +588,7 @@ def main() -> int: pr_analysis=pr_analysis, target_symbols=target_symbols, pr_symbols=pr_symbols, + target_cache_hit=args.target_cache_hit, ) # Post or update comment From 922c2bcd5aa87b024b4a32671bc14596aecd63d8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 15:26:55 -1000 Subject: [PATCH 045/336] fix --- esphome/analyze_memory/__init__.py | 36 +++++++++++++++++++++++++----- script/ci_memory_impact_comment.py | 10 ++++----- 2 files changed, 35 insertions(+), 11 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index af1aee66c8..cb8fb94c14 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -289,20 +289,26 @@ class MemoryAnalyzer: # Try to find the appropriate c++filt for the platform cppfilt_cmd = "c++filt" + _LOGGER.warning("Demangling %d symbols", len(symbols)) + _LOGGER.warning("objdump_path = %s", self.objdump_path) + # Check if we have a toolchain-specific c++filt if self.objdump_path and self.objdump_path != "objdump": # Replace objdump with c++filt in the path potential_cppfilt = self.objdump_path.replace("objdump", "c++filt") + _LOGGER.warning("Checking for toolchain c++filt at: %s", potential_cppfilt) if Path(potential_cppfilt).exists(): cppfilt_cmd = potential_cppfilt - _LOGGER.warning("Using toolchain c++filt: %s", cppfilt_cmd) + _LOGGER.warning("✓ Using toolchain c++filt: %s", cppfilt_cmd) else: _LOGGER.warning( - "Toolchain c++filt not found at %s, using system c++filt", + "✗ Toolchain c++filt not found at %s, using system c++filt", potential_cppfilt, ) else: - _LOGGER.warning("Using system c++filt (objdump_path=%s)", self.objdump_path) + _LOGGER.warning( + "✗ Using system c++filt (objdump_path=%s)", self.objdump_path + ) try: # Send all symbols to c++filt at once @@ -316,15 +322,35 @@ class MemoryAnalyzer: if result.returncode == 0: demangled_lines = result.stdout.strip().split("\n") # Map original to demangled names + failed_count = 0 for original, demangled in zip(symbols, demangled_lines): self._demangle_cache[original] = demangled # Log symbols that failed to demangle (stayed the same) if original == demangled and original.startswith("_Z"): - _LOGGER.debug("Failed to demangle symbol: %s", original) + failed_count += 1 + if failed_count <= 5: # Only log first 5 failures + _LOGGER.warning("Failed to demangle: %s", original[:100]) + + if failed_count > 0: + _LOGGER.warning( + "Failed to demangle %d/%d symbols using %s", + failed_count, + len(symbols), + cppfilt_cmd, + ) + else: + _LOGGER.warning( + "Successfully demangled all %d symbols", len(symbols) + ) return + _LOGGER.warning( + "c++filt exited with code %d: %s", + result.returncode, + 
result.stderr[:200] if result.stderr else "(no error output)", + ) except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e: # On error, cache originals - _LOGGER.debug("Failed to batch demangle symbols: %s", e) + _LOGGER.warning("Failed to batch demangle symbols: %s", e) # If demangling failed, cache originals for symbol in symbols: diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 055c2a9a96..84e821cbec 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -159,14 +159,12 @@ def create_symbol_changes_table( change_str = format_change(target_size, pr_size) # Truncate very long symbol names but show full name in title attribute if len(symbol) <= 100: - display_symbol = symbol + display_symbol = f"`{symbol}`" else: - # Use HTML details for very long symbols - display_symbol = ( - f"
f"<details><summary>{symbol[:97]}...</summary>{symbol}</details>
" - ) + # Use HTML details for very long symbols (no backticks inside HTML) + display_symbol = f"
<details><summary>{symbol[:97]}...</summary>{symbol}</details>
" lines.append( - f"| `{display_symbol}` | {target_str} | {pr_str} | {change_str} |" + f"| {display_symbol} | {target_str} | {pr_str} | {change_str} |" ) if len(changed_symbols) > 30: From 57bf3f968ff417ee57b15aaab316ac1256521d30 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 15:34:17 -1000 Subject: [PATCH 046/336] fix --- script/determine-jobs.py | 37 +++++++++++++++++++++++++++---------- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index bd21926c53..6a24c9eb01 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -258,9 +258,10 @@ def detect_memory_impact_config( """ # Platform preference order for memory impact analysis # Prefer ESP8266 for memory impact as it's the most constrained platform + # ESP32-IDF is preferred over ESP32-Arduino as it's faster to build and more commonly used PLATFORM_PREFERENCE = [ "esp8266-ard", # ESP8266 Arduino (most memory constrained - best for impact analysis) - "esp32-idf", # Primary ESP32 IDF platform + "esp32-idf", # ESP32 IDF platform (primary ESP32 platform, faster builds) "esp32-c3-idf", # ESP32-C3 IDF "esp32-c6-idf", # ESP32-C6 IDF "esp32-s2-idf", # ESP32-S2 IDF @@ -289,6 +290,7 @@ def detect_memory_impact_config( # Find components that have tests on the preferred platform components_with_tests = [] selected_platform = None + component_platforms = {} # Track which platforms each component has for component in sorted(changed_component_set): tests_dir = Path(root_path) / "tests" / "components" / component @@ -301,20 +303,28 @@ def detect_memory_impact_config( continue # Check if component has tests for any preferred platform + available_platforms = [] for test_file in test_files: parts = test_file.stem.split(".") if len(parts) < 2: continue platform = parts[1] - if platform not in PLATFORM_PREFERENCE: - continue - components_with_tests.append(component) - # Select the most preferred platform across all components - if selected_platform is None or PLATFORM_PREFERENCE.index( - platform - ) < PLATFORM_PREFERENCE.index(selected_platform): - selected_platform = platform - break + if platform in PLATFORM_PREFERENCE: + available_platforms.append(platform) + + if not available_platforms: + continue + + # Find the most preferred platform for this component + component_platform = min(available_platforms, key=PLATFORM_PREFERENCE.index) + component_platforms[component] = component_platform + components_with_tests.append(component) + + # Select the most preferred platform across all components + if selected_platform is None or PLATFORM_PREFERENCE.index( + component_platform + ) < PLATFORM_PREFERENCE.index(selected_platform): + selected_platform = component_platform # If no components have tests, don't run memory impact if not components_with_tests: @@ -323,6 +333,13 @@ def detect_memory_impact_config( # Use the most preferred platform found, or fall back to esp8266-ard platform = selected_platform or "esp8266-ard" + # Debug output + print("Memory impact analysis:", file=sys.stderr) + print(f" Changed components: {sorted(changed_component_set)}", file=sys.stderr) + print(f" Components with tests: {components_with_tests}", file=sys.stderr) + print(f" Component platforms: {component_platforms}", file=sys.stderr) + print(f" Selected platform: {platform}", file=sys.stderr) + return { "should_run": "true", "components": components_with_tests, From 293400ee1474ca410113f3aef7c40d539c37e4b8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 15:35:51 -1000 Subject: [PATCH 047/336] fix --- esphome/analyze_memory/__init__.py | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index cb8fb94c14..349c3da507 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -310,11 +310,19 @@ class MemoryAnalyzer: "✗ Using system c++filt (objdump_path=%s)", self.objdump_path ) + # Strip GCC optimization suffixes before demangling + # Suffixes like $isra$0, $part$0, $constprop$0 confuse c++filt + symbols_stripped = [] + for symbol in symbols: + # Remove GCC optimization markers + stripped = re.sub(r"\$(?:isra|part|constprop)\$\d+", "", symbol) + symbols_stripped.append(stripped) + try: # Send all symbols to c++filt at once result = subprocess.run( [cppfilt_cmd], - input="\n".join(symbols), + input="\n".join(symbols_stripped), capture_output=True, text=True, check=False, @@ -323,10 +331,22 @@ class MemoryAnalyzer: demangled_lines = result.stdout.strip().split("\n") # Map original to demangled names failed_count = 0 - for original, demangled in zip(symbols, demangled_lines): + for original, stripped, demangled in zip( + symbols, symbols_stripped, demangled_lines + ): + # If we stripped a suffix, add it back to the demangled name for clarity + if original != stripped: + # Find what was stripped + suffix_match = re.search( + r"(\$(?:isra|part|constprop)\$\d+)", original + ) + if suffix_match: + demangled = f"{demangled} [{suffix_match.group(1)}]" + self._demangle_cache[original] = demangled - # Log symbols that failed to demangle (stayed the same) - if original == demangled and original.startswith("_Z"): + + # Log symbols that failed to demangle (stayed the same as stripped version) + if stripped == demangled and stripped.startswith("_Z"): failed_count += 1 if failed_count <= 5: # Only log first 5 failures _LOGGER.warning("Failed to demangle: %s", original[:100]) From db69ce24ae1b53b6077d1cbddd105b3cba5fe9f6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 15:41:20 -1000 Subject: [PATCH 048/336] fix --- script/ci_memory_impact_comment.py | 42 ++++++++++++++++++++---------- 1 file changed, 28 insertions(+), 14 deletions(-) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 84e821cbec..60676949e8 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -92,6 +92,21 @@ def format_change(before: int, after: int) -> str: return f"{emoji} {delta_str} ({pct_str})" +def format_symbol_for_display(symbol: str) -> str: + """Format a symbol name for display in markdown table. + + Args: + symbol: Symbol name to format + + Returns: + Formatted symbol with backticks or HTML details tag for long names + """ + if len(symbol) <= 100: + return f"`{symbol}`" + # Use HTML details for very long symbols (no backticks inside HTML) + return f"
<details><summary>{symbol[:97]}...</summary>{symbol}</details>
" + + def create_symbol_changes_table( target_symbols: dict | None, pr_symbols: dict | None ) -> str: @@ -157,12 +172,7 @@ def create_symbol_changes_table( target_str = format_bytes(target_size) pr_str = format_bytes(pr_size) change_str = format_change(target_size, pr_size) - # Truncate very long symbol names but show full name in title attribute - if len(symbol) <= 100: - display_symbol = f"`{symbol}`" - else: - # Use HTML details for very long symbols (no backticks inside HTML) - display_symbol = f"
<details><summary>{symbol[:97]}...</summary>{symbol}</details>
" + display_symbol = format_symbol_for_display(symbol) lines.append( f"| {display_symbol} | {target_str} | {pr_str} | {change_str} |" ) @@ -186,8 +196,8 @@ def create_symbol_changes_table( ) for symbol, size in new_symbols[:15]: - display_symbol = symbol if len(symbol) <= 80 else symbol[:77] + "..." - lines.append(f"| `{display_symbol}` | {format_bytes(size)} |") + display_symbol = format_symbol_for_display(symbol) + lines.append(f"| {display_symbol} | {format_bytes(size)} |") if len(new_symbols) > 15: total_new_size = sum(s[1] for s in new_symbols) @@ -209,8 +219,8 @@ def create_symbol_changes_table( ) for symbol, size in removed_symbols[:15]: - display_symbol = symbol if len(symbol) <= 80 else symbol[:77] + "..." - lines.append(f"| `{display_symbol}` | {format_bytes(size)} |") + display_symbol = format_symbol_for_display(symbol) + lines.append(f"| {display_symbol} | {format_bytes(size)} |") if len(removed_symbols) > 15: total_removed_size = sum(s[1] for s in removed_symbols) @@ -242,7 +252,7 @@ def create_detailed_breakdown_table( # Combine all components from both analyses all_components = set(target_analysis.keys()) | set(pr_analysis.keys()) - # Filter to components that have changed + # Filter to components that have changed (ignoring noise ≤2 bytes) changed_components = [] for comp in all_components: target_mem = target_analysis.get(comp, {}) @@ -251,9 +261,9 @@ def create_detailed_breakdown_table( target_flash = target_mem.get("flash_total", 0) pr_flash = pr_mem.get("flash_total", 0) - # Only include if component has changed - if target_flash != pr_flash: - delta = pr_flash - target_flash + # Only include if component has meaningful change (>2 bytes) + delta = pr_flash - target_flash + if abs(delta) > 2: changed_components.append((comp, target_flash, pr_flash, delta)) if not changed_components: @@ -362,6 +372,10 @@ def create_comment_body( {component_breakdown}{symbol_changes}{cache_note} --- +> **Note:** This analysis measures **static RAM and Flash usage** only (compile-time allocation). +> **Dynamic memory (heap)** cannot be measured automatically. +> **⚠️ You must test this PR on a real device** to measure free heap and ensure no runtime memory issues. + *This analysis runs automatically when components change. Memory usage is measured from {config_note}.* """ From a1d6bac21a41fcbfd49a7356dc13ea0a06f6ea08 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 15:44:36 -1000 Subject: [PATCH 049/336] preen --- esphome/analyze_memory/cli.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index e8541b1621..7b004353ec 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -387,17 +387,10 @@ def main(): file=sys.stderr, ) - try: - analyzer = MemoryAnalyzerCLI(elf_file, idedata=idedata) - analyzer.analyze() - report = analyzer.generate_report() - print(report) - except Exception as e: - print(f"Error: {e}", file=sys.stderr) - import traceback - - traceback.print_exc(file=sys.stderr) - sys.exit(1) + analyzer = MemoryAnalyzerCLI(elf_file, idedata=idedata) + analyzer.analyze() + report = analyzer.generate_report() + print(report) if __name__ == "__main__": From 0fcae15c257772d3c1d868555fcd1cbb82856fe5 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 15:53:03 -1000 Subject: [PATCH 050/336] preen --- script/determine-jobs.py | 28 +++++++++++++++++++++++++--- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index 6a24c9eb01..e7a9b649b0 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -56,6 +56,10 @@ from helpers import ( root_path, ) +# Memory impact analysis constants +MEMORY_IMPACT_FALLBACK_COMPONENT = "api" # Representative component for core changes +MEMORY_IMPACT_FALLBACK_PLATFORM = "esp32-idf" # Most representative platform + def should_run_integration_tests(branch: str | None = None) -> bool: """Determine if integration tests should run based on changed files. @@ -273,6 +277,7 @@ def detect_memory_impact_config( # Find all changed components (excluding core and base bus components) changed_component_set = set() + has_core_changes = False for file in files: if file.startswith(ESPHOME_COMPONENTS_PATH): @@ -282,9 +287,22 @@ def detect_memory_impact_config( # Skip base bus components as they're used across many builds if component not in ["i2c", "spi", "uart", "modbus", "canbus"]: changed_component_set.add(component) + elif file.startswith("esphome/"): + # Core ESPHome files changed (not component-specific) + has_core_changes = True - # If no components changed, don't run memory impact - if not changed_component_set: + # If no components changed but core changed, test representative component + force_fallback_platform = False + if not changed_component_set and has_core_changes: + print( + f"Memory impact: No components changed, but core files changed. " + f"Testing {MEMORY_IMPACT_FALLBACK_COMPONENT} component on {MEMORY_IMPACT_FALLBACK_PLATFORM}.", + file=sys.stderr, + ) + changed_component_set.add(MEMORY_IMPACT_FALLBACK_COMPONENT) + force_fallback_platform = True # Use fallback platform (most representative) + elif not changed_component_set: + # No components and no core changes return {"should_run": "false"} # Find components that have tests on the preferred platform @@ -331,7 +349,11 @@ def detect_memory_impact_config( return {"should_run": "false"} # Use the most preferred platform found, or fall back to esp8266-ard - platform = selected_platform or "esp8266-ard" + # Exception: for core changes, use fallback platform (most representative of codebase) + if force_fallback_platform: + platform = MEMORY_IMPACT_FALLBACK_PLATFORM + else: + platform = selected_platform or "esp8266-ard" # Debug output print("Memory impact analysis:", file=sys.stderr) From 71f2fb83532f12b168ff4381f5a9e3c5a984b756 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 15:56:13 -1000 Subject: [PATCH 051/336] preen --- script/determine-jobs.py | 31 ++++++++++++++++++++++--------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index e7a9b649b0..eb8cd5df54 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -38,6 +38,7 @@ Options: from __future__ import annotations import argparse +from enum import StrEnum from functools import cache import json import os @@ -56,9 +57,21 @@ from helpers import ( root_path, ) + +class Platform(StrEnum): + """Platform identifiers for memory impact analysis.""" + + ESP8266_ARD = "esp8266-ard" + ESP32_IDF = "esp32-idf" + ESP32_C3_IDF = "esp32-c3-idf" + ESP32_C6_IDF = "esp32-c6-idf" + ESP32_S2_IDF = "esp32-s2-idf" + ESP32_S3_IDF = "esp32-s3-idf" + + # Memory impact analysis constants MEMORY_IMPACT_FALLBACK_COMPONENT = "api" # Representative component for core changes -MEMORY_IMPACT_FALLBACK_PLATFORM = "esp32-idf" # Most representative platform +MEMORY_IMPACT_FALLBACK_PLATFORM = Platform.ESP32_IDF # Most representative platform def should_run_integration_tests(branch: str | None = None) -> bool: @@ -262,14 +275,14 @@ def detect_memory_impact_config( """ # Platform preference order for memory impact analysis # Prefer ESP8266 for memory impact as it's the most constrained platform - # ESP32-IDF is preferred over ESP32-Arduino as it's faster to build and more commonly used + # ESP32-IDF is preferred over ESP32-Arduino as it's the most representative of codebase PLATFORM_PREFERENCE = [ - "esp8266-ard", # ESP8266 Arduino (most memory constrained - best for impact analysis) - "esp32-idf", # ESP32 IDF platform (primary ESP32 platform, faster builds) - "esp32-c3-idf", # ESP32-C3 IDF - "esp32-c6-idf", # ESP32-C6 IDF - "esp32-s2-idf", # ESP32-S2 IDF - "esp32-s3-idf", # ESP32-S3 IDF + Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained - best for impact analysis) + Platform.ESP32_IDF, # ESP32 IDF platform (primary ESP32 platform, most representative) + Platform.ESP32_C3_IDF, # ESP32-C3 IDF + Platform.ESP32_C6_IDF, # ESP32-C6 IDF + Platform.ESP32_S2_IDF, # ESP32-S2 IDF + Platform.ESP32_S3_IDF, # ESP32-S3 IDF ] # Get actually changed files (not dependencies) @@ -353,7 +366,7 @@ def detect_memory_impact_config( if force_fallback_platform: platform = MEMORY_IMPACT_FALLBACK_PLATFORM else: - platform = selected_platform or "esp8266-ard" + platform = selected_platform or Platform.ESP8266_ARD # Debug output print("Memory impact analysis:", file=sys.stderr) From a45e94cd06fd063b3370f5eafba9b4536b38b1c5 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 16:02:08 -1000 Subject: [PATCH 052/336] preen --- esphome/analyze_memory/__init__.py | 34 ++++++++++++++++++++++++++---- 1 file changed, 30 insertions(+), 4 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 349c3da507..b8bbd68df2 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -310,13 +310,27 @@ class MemoryAnalyzer: "✗ Using system c++filt (objdump_path=%s)", self.objdump_path ) - # Strip GCC optimization suffixes before demangling + # Strip GCC optimization suffixes and prefixes before demangling # Suffixes like $isra$0, $part$0, $constprop$0 confuse c++filt + # Prefixes like _GLOBAL__sub_I_ need to be removed and tracked symbols_stripped = [] + symbols_prefixes = [] # Track removed prefixes for symbol in symbols: # Remove GCC optimization markers stripped = re.sub(r"\$(?:isra|part|constprop)\$\d+", "", symbol) + + # Handle GCC global constructor/initializer prefixes + # _GLOBAL__sub_I_ -> extract for demangling + prefix = "" + if stripped.startswith("_GLOBAL__sub_I_"): + prefix = "_GLOBAL__sub_I_" + stripped = stripped[len(prefix) :] + elif stripped.startswith("_GLOBAL__sub_D_"): + prefix = "_GLOBAL__sub_D_" + stripped = stripped[len(prefix) :] + symbols_stripped.append(stripped) + symbols_prefixes.append(prefix) try: # Send all symbols to c++filt at once @@ -331,11 +345,23 @@ class MemoryAnalyzer: demangled_lines = result.stdout.strip().split("\n") # Map original to demangled names failed_count = 0 - for original, stripped, demangled in zip( - symbols, symbols_stripped, demangled_lines + for original, stripped, prefix, demangled in zip( + symbols, symbols_stripped, symbols_prefixes, demangled_lines ): + # Add back any prefix that was removed + if prefix: + if demangled != stripped: + # Successfully demangled - add descriptive prefix + if prefix == "_GLOBAL__sub_I_": + demangled = f"[global constructor for: {demangled}]" + elif prefix == "_GLOBAL__sub_D_": + demangled = f"[global destructor for: {demangled}]" + else: + # Failed to demangle - restore original prefix + demangled = prefix + demangled + # If we stripped a suffix, add it back to the demangled name for clarity - if original != stripped: + if original != stripped and not prefix: # Find what was stripped suffix_match = re.search( r"(\$(?:isra|part|constprop)\$\d+)", original From 29b9073d62631fe1ec8bb57bd8e09185b9119d5e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 16:07:46 -1000 Subject: [PATCH 053/336] esp32 only platforms --- script/determine-jobs.py | 44 ++++++++++++++++++++++++++-------------- 1 file changed, 29 insertions(+), 15 deletions(-) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index eb8cd5df54..d4b46e5474 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -318,10 +318,9 @@ def detect_memory_impact_config( # No components and no core changes return {"should_run": "false"} - # Find components that have tests on the preferred platform + # Find components that have tests and collect their supported platforms components_with_tests = [] - selected_platform = None - component_platforms = {} # Track which platforms each component has + component_platforms_map = {} # Track which platforms each component supports for component in sorted(changed_component_set): tests_dir = Path(root_path) / "tests" / "components" / component @@ -346,33 +345,48 @@ def detect_memory_impact_config( if not available_platforms: continue - # Find the most preferred platform for this component - component_platform = min(available_platforms, key=PLATFORM_PREFERENCE.index) - component_platforms[component] = component_platform + component_platforms_map[component] = set(available_platforms) components_with_tests.append(component) - # Select the most preferred platform across all components - if selected_platform is None or PLATFORM_PREFERENCE.index( - component_platform - ) < PLATFORM_PREFERENCE.index(selected_platform): - selected_platform = component_platform - # If no components have tests, don't run memory impact if not components_with_tests: return {"should_run": "false"} - # Use the most preferred platform found, or fall back to esp8266-ard + # Find common platforms supported by ALL components + # This ensures we can build all components together in a merged config + common_platforms = set(PLATFORM_PREFERENCE) + for component, platforms in component_platforms_map.items(): + common_platforms &= platforms + + # Select the most preferred platform from the common set # Exception: for core changes, use fallback platform (most representative of codebase) if force_fallback_platform: platform = MEMORY_IMPACT_FALLBACK_PLATFORM + elif common_platforms: + # Pick the most preferred platform that all components support + platform = min(common_platforms, key=PLATFORM_PREFERENCE.index) else: - platform = selected_platform or Platform.ESP8266_ARD + # No common platform - fall back to testing each component individually + # Pick the most commonly supported platform + platform_counts = {} + for platforms in component_platforms_map.values(): + for p in platforms: + platform_counts[p] = platform_counts.get(p, 0) + 1 + # Pick the platform supported by most components, preferring earlier in PLATFORM_PREFERENCE + platform = max( + platform_counts.keys(), + key=lambda p: (platform_counts[p], -PLATFORM_PREFERENCE.index(p)), + ) # Debug output print("Memory impact analysis:", file=sys.stderr) print(f" Changed components: {sorted(changed_component_set)}", file=sys.stderr) print(f" Components with tests: {components_with_tests}", file=sys.stderr) - print(f" Component platforms: {component_platforms}", file=sys.stderr) + print( + f" Component platforms: {dict(sorted(component_platforms_map.items()))}", + file=sys.stderr, + ) + print(f" Common platforms: {sorted(common_platforms)}", file=sys.stderr) print(f" Selected platform: {platform}", file=sys.stderr) return { From f5d69a25393119f08aec1b46a2f5324435b10eaa Mon Sep 17 
00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 16:11:28 -1000 Subject: [PATCH 054/336] esp32 only platforms --- esphome/analyze_memory/__init__.py | 23 ----------------------- script/determine-jobs.py | 8 +++----- script/helpers.py | 17 +++++++++++++++++ 3 files changed, 20 insertions(+), 28 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index b8bbd68df2..07f8df8767 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -2,7 +2,6 @@ from collections import defaultdict from dataclasses import dataclass, field -import json import logging from pathlib import Path import re @@ -422,28 +421,6 @@ class MemoryAnalyzer: return "Other Core" - def to_json(self) -> str: - """Export analysis results as JSON.""" - data = { - "components": { - name: { - "text": mem.text_size, - "rodata": mem.rodata_size, - "data": mem.data_size, - "bss": mem.bss_size, - "flash_total": mem.flash_total, - "ram_total": mem.ram_total, - "symbol_count": mem.symbol_count, - } - for name, mem in self.components.items() - }, - "totals": { - "flash": sum(c.flash_total for c in self.components.values()), - "ram": sum(c.ram_total for c in self.components.values()), - }, - } - return json.dumps(data, indent=2) - if __name__ == "__main__": from .cli import main diff --git a/script/determine-jobs.py b/script/determine-jobs.py index d4b46e5474..befd75fb5b 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -54,6 +54,7 @@ from helpers import ( changed_files, get_all_dependencies, get_components_from_integration_fixtures, + parse_test_filename, root_path, ) @@ -335,11 +336,8 @@ def detect_memory_impact_config( # Check if component has tests for any preferred platform available_platforms = [] for test_file in test_files: - parts = test_file.stem.split(".") - if len(parts) < 2: - continue - platform = parts[1] - if platform in PLATFORM_PREFERENCE: + _, platform = parse_test_filename(test_file) + if platform != "all" and platform in PLATFORM_PREFERENCE: available_platforms.append(platform) if not available_platforms: diff --git a/script/helpers.py b/script/helpers.py index 61306b9489..85e568dcf8 100644 --- a/script/helpers.py +++ b/script/helpers.py @@ -46,6 +46,23 @@ def parse_list_components_output(output: str) -> list[str]: return [c.strip() for c in output.strip().split("\n") if c.strip()] +def parse_test_filename(test_file: Path) -> tuple[str, str]: + """Parse test filename to extract test name and platform. + + Test files follow the naming pattern: test..yaml or test-..yaml + + Args: + test_file: Path to test file + + Returns: + Tuple of (test_name, platform) + """ + parts = test_file.stem.split(".") + if len(parts) == 2: + return parts[0], parts[1] # test, platform + return parts[0], "all" + + def styled(color: str | tuple[str, ...], msg: str, reset: bool = True) -> str: prefix = "".join(color) if isinstance(color, tuple) else color suffix = colorama.Style.RESET_ALL if reset else "" From 3b8b2c07542e543077bbd4f8b95476d02940f602 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 16:13:30 -1000 Subject: [PATCH 055/336] esp32 only platforms --- script/determine-jobs.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index befd75fb5b..bf944886ea 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -275,13 +275,13 @@ def detect_memory_impact_config( - use_merged_config: "true" (always use merged config) """ # Platform preference order for memory impact analysis - # Prefer ESP8266 for memory impact as it's the most constrained platform - # ESP32-IDF is preferred over ESP32-Arduino as it's the most representative of codebase + # Prefer newer platforms first as they represent the future of ESPHome + # ESP8266 is most constrained but many new features don't support it PLATFORM_PREFERENCE = [ + Platform.ESP32_C6_IDF, # ESP32-C6 IDF (newest, supports Thread/Zigbee) Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained - best for impact analysis) Platform.ESP32_IDF, # ESP32 IDF platform (primary ESP32 platform, most representative) Platform.ESP32_C3_IDF, # ESP32-C3 IDF - Platform.ESP32_C6_IDF, # ESP32-C6 IDF Platform.ESP32_S2_IDF, # ESP32-S2 IDF Platform.ESP32_S3_IDF, # ESP32-S3 IDF ] @@ -364,8 +364,8 @@ def detect_memory_impact_config( # Pick the most preferred platform that all components support platform = min(common_platforms, key=PLATFORM_PREFERENCE.index) else: - # No common platform - fall back to testing each component individually - # Pick the most commonly supported platform + # No common platform - pick the most commonly supported platform + # This allows testing components individually even if they can't be merged platform_counts = {} for platforms in component_platforms_map.values(): for p in platforms: From c6ecfd0c55d278e43189988a081e7908c36461a4 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 16:15:46 -1000 Subject: [PATCH 056/336] esp32 only platforms --- esphome/analyze_memory/__init__.py | 162 +++++++++++++++++++---------- 1 file changed, 109 insertions(+), 53 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 07f8df8767..5ef9eab526 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -27,6 +27,12 @@ if TYPE_CHECKING: _LOGGER = logging.getLogger(__name__) +# GCC global constructor/destructor prefix annotations +_GCC_PREFIX_ANNOTATIONS = { + "_GLOBAL__sub_I_": "global constructor for", + "_GLOBAL__sub_D_": "global destructor for", +} + @dataclass class MemorySection: @@ -340,66 +346,116 @@ class MemoryAnalyzer: text=True, check=False, ) - if result.returncode == 0: - demangled_lines = result.stdout.strip().split("\n") - # Map original to demangled names - failed_count = 0 - for original, stripped, prefix, demangled in zip( - symbols, symbols_stripped, symbols_prefixes, demangled_lines - ): - # Add back any prefix that was removed - if prefix: - if demangled != stripped: - # Successfully demangled - add descriptive prefix - if prefix == "_GLOBAL__sub_I_": - demangled = f"[global constructor for: {demangled}]" - elif prefix == "_GLOBAL__sub_D_": - demangled = f"[global destructor for: {demangled}]" - else: - # Failed to demangle - restore original prefix - demangled = prefix + demangled + except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e: + # On error, cache originals + _LOGGER.warning("Failed to batch demangle symbols: %s", e) + for symbol in symbols: + self._demangle_cache[symbol] = symbol + return - # If we stripped a suffix, add it back to the demangled name for clarity - if original != stripped and not prefix: - # Find what was stripped - suffix_match = re.search( - r"(\$(?:isra|part|constprop)\$\d+)", original - ) - if suffix_match: - demangled = f"{demangled} [{suffix_match.group(1)}]" - - self._demangle_cache[original] = demangled - - # Log symbols that failed to demangle (stayed the same as stripped version) - if stripped == demangled and stripped.startswith("_Z"): - failed_count += 1 - if failed_count <= 5: # Only log first 5 failures - _LOGGER.warning("Failed to demangle: %s", original[:100]) - - if failed_count > 0: - _LOGGER.warning( - "Failed to demangle %d/%d symbols using %s", - failed_count, - len(symbols), - cppfilt_cmd, - ) - else: - _LOGGER.warning( - "Successfully demangled all %d symbols", len(symbols) - ) - return + if result.returncode != 0: _LOGGER.warning( "c++filt exited with code %d: %s", result.returncode, result.stderr[:200] if result.stderr else "(no error output)", ) - except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e: - # On error, cache originals - _LOGGER.warning("Failed to batch demangle symbols: %s", e) + # Cache originals on failure + for symbol in symbols: + self._demangle_cache[symbol] = symbol + return - # If demangling failed, cache originals - for symbol in symbols: - self._demangle_cache[symbol] = symbol + # Process demangled output + self._process_demangled_output( + symbols, symbols_stripped, symbols_prefixes, result.stdout, cppfilt_cmd + ) + + def _process_demangled_output( + self, + symbols: list[str], + symbols_stripped: list[str], + symbols_prefixes: list[str], + demangled_output: str, + cppfilt_cmd: str, + ) -> None: + """Process demangled symbol output and populate cache. 
+ + Args: + symbols: Original symbol names + symbols_stripped: Stripped symbol names sent to c++filt + symbols_prefixes: Removed prefixes to restore + demangled_output: Output from c++filt + cppfilt_cmd: Path to c++filt command (for logging) + """ + demangled_lines = demangled_output.strip().split("\n") + failed_count = 0 + + for original, stripped, prefix, demangled in zip( + symbols, symbols_stripped, symbols_prefixes, demangled_lines + ): + # Add back any prefix that was removed + demangled = self._restore_symbol_prefix(prefix, stripped, demangled) + + # If we stripped a suffix, add it back to the demangled name for clarity + if original != stripped and not prefix: + demangled = self._restore_symbol_suffix(original, demangled) + + self._demangle_cache[original] = demangled + + # Log symbols that failed to demangle (stayed the same as stripped version) + if stripped == demangled and stripped.startswith("_Z"): + failed_count += 1 + if failed_count <= 5: # Only log first 5 failures + _LOGGER.warning("Failed to demangle: %s", original[:100]) + + if failed_count > 0: + _LOGGER.warning( + "Failed to demangle %d/%d symbols using %s", + failed_count, + len(symbols), + cppfilt_cmd, + ) + else: + _LOGGER.warning("Successfully demangled all %d symbols", len(symbols)) + + @staticmethod + def _restore_symbol_prefix(prefix: str, stripped: str, demangled: str) -> str: + """Restore prefix that was removed before demangling. + + Args: + prefix: Prefix that was removed (e.g., "_GLOBAL__sub_I_") + stripped: Stripped symbol name + demangled: Demangled symbol name + + Returns: + Demangled name with prefix restored/annotated + """ + if not prefix: + return demangled + + # Successfully demangled - add descriptive prefix + if demangled != stripped and ( + annotation := _GCC_PREFIX_ANNOTATIONS.get(prefix) + ): + return f"[{annotation}: {demangled}]" + + # Failed to demangle - restore original prefix + return prefix + demangled + + @staticmethod + def _restore_symbol_suffix(original: str, demangled: str) -> str: + """Restore GCC optimization suffix that was removed before demangling. + + Args: + original: Original symbol name with suffix + demangled: Demangled symbol name without suffix + + Returns: + Demangled name with suffix annotation + """ + suffix_match = re.search(r"(\$(?:isra|part|constprop)\$\d+)", original) + if suffix_match: + return f"{demangled} [{suffix_match.group(1)}]" + return demangled def _demangle_symbol(self, symbol: str) -> str: """Get demangled C++ symbol name from cache.""" From 558d4eb9ddfbd7274151046798630dd5c40e6cd7 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 16:19:50 -1000 Subject: [PATCH 057/336] preen --- script/ci_memory_impact_extract.py | 57 +++++++++++++----------------- script/determine-jobs.py | 36 ++++++++++--------- 2 files changed, 45 insertions(+), 48 deletions(-) diff --git a/script/ci_memory_impact_extract.py b/script/ci_memory_impact_extract.py index 96f947e12a..76632ebc33 100755 --- a/script/ci_memory_impact_extract.py +++ b/script/ci_memory_impact_extract.py @@ -132,41 +132,34 @@ def run_detailed_analysis(build_dir: str) -> dict | None: file=sys.stderr, ) - try: - analyzer = MemoryAnalyzer(elf_path, idedata=idedata) - components = analyzer.analyze() + analyzer = MemoryAnalyzer(elf_path, idedata=idedata) + components = analyzer.analyze() - # Convert to JSON-serializable format - result = { - "components": { - name: { - "text": mem.text_size, - "rodata": mem.rodata_size, - "data": mem.data_size, - "bss": mem.bss_size, - "flash_total": mem.flash_total, - "ram_total": mem.ram_total, - "symbol_count": mem.symbol_count, - } - for name, mem in components.items() - }, - "symbols": {}, - } + # Convert to JSON-serializable format + result = { + "components": { + name: { + "text": mem.text_size, + "rodata": mem.rodata_size, + "data": mem.data_size, + "bss": mem.bss_size, + "flash_total": mem.flash_total, + "ram_total": mem.ram_total, + "symbol_count": mem.symbol_count, + } + for name, mem in components.items() + }, + "symbols": {}, + } - # Build symbol map - for section in analyzer.sections.values(): - for symbol_name, size, _ in section.symbols: - if size > 0: - demangled = analyzer._demangle_symbol(symbol_name) - result["symbols"][demangled] = size + # Build symbol map + for section in analyzer.sections.values(): + for symbol_name, size, _ in section.symbols: + if size > 0: + demangled = analyzer._demangle_symbol(symbol_name) + result["symbols"][demangled] = size - return result - except Exception as e: - print(f"Warning: Failed to run detailed analysis: {e}", file=sys.stderr) - import traceback - - traceback.print_exc(file=sys.stderr) - return None + return result def main() -> int: diff --git a/script/determine-jobs.py b/script/determine-jobs.py index bf944886ea..8e2c239fe2 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -74,6 +74,18 @@ class Platform(StrEnum): MEMORY_IMPACT_FALLBACK_COMPONENT = "api" # Representative component for core changes MEMORY_IMPACT_FALLBACK_PLATFORM = Platform.ESP32_IDF # Most representative platform +# Platform preference order for memory impact analysis +# Prefer newer platforms first as they represent the future of ESPHome +# ESP8266 is most constrained but many new features don't support it +MEMORY_IMPACT_PLATFORM_PREFERENCE = [ + Platform.ESP32_C6_IDF, # ESP32-C6 IDF (newest, supports Thread/Zigbee) + Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained - best for impact analysis) + Platform.ESP32_IDF, # ESP32 IDF platform (primary ESP32 platform, most representative) + Platform.ESP32_C3_IDF, # ESP32-C3 IDF + Platform.ESP32_S2_IDF, # ESP32-S2 IDF + Platform.ESP32_S3_IDF, # ESP32-S3 IDF +] + def should_run_integration_tests(branch: str | None = None) -> bool: """Determine if integration tests should run based on changed files. 
@@ -274,17 +286,6 @@ def detect_memory_impact_config( - platform: platform name for the merged build - use_merged_config: "true" (always use merged config) """ - # Platform preference order for memory impact analysis - # Prefer newer platforms first as they represent the future of ESPHome - # ESP8266 is most constrained but many new features don't support it - PLATFORM_PREFERENCE = [ - Platform.ESP32_C6_IDF, # ESP32-C6 IDF (newest, supports Thread/Zigbee) - Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained - best for impact analysis) - Platform.ESP32_IDF, # ESP32 IDF platform (primary ESP32 platform, most representative) - Platform.ESP32_C3_IDF, # ESP32-C3 IDF - Platform.ESP32_S2_IDF, # ESP32-S2 IDF - Platform.ESP32_S3_IDF, # ESP32-S3 IDF - ] # Get actually changed files (not dependencies) files = changed_files(branch) @@ -337,7 +338,7 @@ def detect_memory_impact_config( available_platforms = [] for test_file in test_files: _, platform = parse_test_filename(test_file) - if platform != "all" and platform in PLATFORM_PREFERENCE: + if platform != "all" and platform in MEMORY_IMPACT_PLATFORM_PREFERENCE: available_platforms.append(platform) if not available_platforms: @@ -352,7 +353,7 @@ def detect_memory_impact_config( # Find common platforms supported by ALL components # This ensures we can build all components together in a merged config - common_platforms = set(PLATFORM_PREFERENCE) + common_platforms = set(MEMORY_IMPACT_PLATFORM_PREFERENCE) for component, platforms in component_platforms_map.items(): common_platforms &= platforms @@ -362,7 +363,7 @@ def detect_memory_impact_config( platform = MEMORY_IMPACT_FALLBACK_PLATFORM elif common_platforms: # Pick the most preferred platform that all components support - platform = min(common_platforms, key=PLATFORM_PREFERENCE.index) + platform = min(common_platforms, key=MEMORY_IMPACT_PLATFORM_PREFERENCE.index) else: # No common platform - pick the most commonly supported platform # This allows testing components individually even if they can't be merged @@ -370,10 +371,13 @@ def detect_memory_impact_config( for platforms in component_platforms_map.values(): for p in platforms: platform_counts[p] = platform_counts.get(p, 0) + 1 - # Pick the platform supported by most components, preferring earlier in PLATFORM_PREFERENCE + # Pick the platform supported by most components, preferring earlier in MEMORY_IMPACT_PLATFORM_PREFERENCE platform = max( platform_counts.keys(), - key=lambda p: (platform_counts[p], -PLATFORM_PREFERENCE.index(p)), + key=lambda p: ( + platform_counts[p], + -MEMORY_IMPACT_PLATFORM_PREFERENCE.index(p), + ), ) # Debug output From 5e1ee92754c3262d6dc8af99830d2fc6099ec2a8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 16:34:25 -1000 Subject: [PATCH 058/336] add tests --- tests/components/sensor/common.yaml | 101 ++++++++++++++++++ tests/components/sensor/test.esp8266-ard.yaml | 1 + 2 files changed, 102 insertions(+) create mode 100644 tests/components/sensor/common.yaml create mode 100644 tests/components/sensor/test.esp8266-ard.yaml diff --git a/tests/components/sensor/common.yaml b/tests/components/sensor/common.yaml new file mode 100644 index 0000000000..ace7d0a38a --- /dev/null +++ b/tests/components/sensor/common.yaml @@ -0,0 +1,101 @@ +sensor: + # Source sensor for testing filters + - platform: template + name: "Source Sensor" + id: source_sensor + lambda: return 42.0; + update_interval: 1s + + # Streaming filters (window_size == send_every) - uses StreamingFilter base class + - platform: copy + source_id: source_sensor + name: "Streaming Min Filter" + filters: + - min: + window_size: 10 + send_every: 10 # Batch window → StreamingMinFilter + + - platform: copy + source_id: source_sensor + name: "Streaming Max Filter" + filters: + - max: + window_size: 10 + send_every: 10 # Batch window → StreamingMaxFilter + + - platform: copy + source_id: source_sensor + name: "Streaming Moving Average Filter" + filters: + - sliding_window_moving_average: + window_size: 10 + send_every: 10 # Batch window → StreamingMovingAverageFilter + + # Sliding window filters (window_size != send_every) - uses SlidingWindowFilter base class with ring buffer + - platform: copy + source_id: source_sensor + name: "Sliding Min Filter" + filters: + - min: + window_size: 10 + send_every: 5 # Sliding window → MinFilter with ring buffer + + - platform: copy + source_id: source_sensor + name: "Sliding Max Filter" + filters: + - max: + window_size: 10 + send_every: 5 # Sliding window → MaxFilter with ring buffer + + - platform: copy + source_id: source_sensor + name: "Sliding Median Filter" + filters: + - median: + window_size: 10 + send_every: 5 # Sliding window → MedianFilter with ring buffer + + - platform: copy + source_id: source_sensor + name: "Sliding Quantile Filter" + filters: + - quantile: + window_size: 10 + send_every: 5 + quantile: 0.9 # Sliding window → QuantileFilter with ring buffer + + - platform: copy + source_id: source_sensor + name: "Sliding Moving Average Filter" + filters: + - sliding_window_moving_average: + window_size: 10 + send_every: 5 # Sliding window → SlidingWindowMovingAverageFilter with ring buffer + + # Edge cases + - platform: copy + source_id: source_sensor + name: "Large Batch Window Min" + filters: + - min: + window_size: 1000 + send_every: 1000 # Large batch → StreamingMinFilter (4 bytes, not 4KB) + + - platform: copy + source_id: source_sensor + name: "Small Sliding Window" + filters: + - median: + window_size: 3 + send_every: 1 # Frequent output → MedianFilter with 3-element ring buffer + + # send_first_at parameter test + - platform: copy + source_id: source_sensor + name: "Early Send Filter" + filters: + - max: + window_size: 10 + send_every: 10 + send_first_at: 1 # Send after first value diff --git a/tests/components/sensor/test.esp8266-ard.yaml b/tests/components/sensor/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/sensor/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml From 1ec9383abe07ea278824ca63e55be8d5751ffe05 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 16:39:10 -1000 Subject: [PATCH 059/336] preen --- .github/workflows/ci.yml | 34 +++++++++++++++++++++------------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 74ba831bc4..f2f3169eae 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -540,13 +540,22 @@ jobs: with: ref: ${{ github.base_ref }} - # Create cache key based on: - # 1. Target branch commit SHA - # 2. Hash of build infrastructure files (scripts and CI workflow) - # 3. Platform being tested - # 4. Component list + # Check if memory impact extraction script exists on target branch + # If not, skip the analysis (this handles older branches that don't have the feature) + - name: Check for memory impact script + id: check-script + run: | + if [ -f "script/ci_memory_impact_extract.py" ]; then + echo "skip=false" >> $GITHUB_OUTPUT + else + echo "skip=true" >> $GITHUB_OUTPUT + echo "::warning::ci_memory_impact_extract.py not found on target branch, skipping memory impact analysis" + fi + + # All remaining steps only run if script exists - name: Generate cache key id: cache-key + if: steps.check-script.outputs.skip != 'true' run: | # Get the commit SHA of the target branch target_sha=$(git rev-parse HEAD) @@ -571,15 +580,16 @@ jobs: echo "cache-key=${cache_key}" >> $GITHUB_OUTPUT echo "Cache key: ${cache_key}" - # Try to restore cached analysis results - name: Restore cached memory analysis id: cache-memory-analysis + if: steps.check-script.outputs.skip != 'true' uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: path: memory-analysis-target.json key: ${{ steps.cache-key.outputs.cache-key }} - name: Cache status + if: steps.check-script.outputs.skip != 'true' run: | if [ "${{ steps.cache-memory-analysis.outputs.cache-hit }}" == "true" ]; then echo "✓ Cache hit! Using cached memory analysis results." @@ -588,23 +598,22 @@ jobs: echo "✗ Cache miss. Will build and analyze memory usage." fi - # Only restore Python and build if cache miss - name: Restore Python - if: steps.cache-memory-analysis.outputs.cache-hit != 'true' + if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' uses: ./.github/actions/restore-python with: python-version: ${{ env.DEFAULT_PYTHON }} cache-key: ${{ needs.common.outputs.cache-key }} - name: Cache platformio - if: steps.cache-memory-analysis.outputs.cache-hit != 'true' + if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: path: ~/.platformio key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }} - name: Build, compile, and analyze memory - if: steps.cache-memory-analysis.outputs.cache-hit != 'true' + if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' id: build run: | . 
venv/bin/activate @@ -631,17 +640,16 @@ jobs: --output-env \ --output-json memory-analysis-target.json - # Save build results to cache for future runs - name: Save memory analysis to cache - if: steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success' + if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success' uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: path: memory-analysis-target.json key: ${{ steps.cache-key.outputs.cache-key }} - # Extract outputs from cached or freshly built analysis - name: Extract memory usage for outputs id: extract + if: steps.check-script.outputs.skip != 'true' run: | if [ -f memory-analysis-target.json ]; then ram=$(jq -r '.ram_bytes' memory-analysis-target.json) From 6fe5a0c736c3fa57b69d4d3e5d97931e80fff516 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 16:44:38 -1000 Subject: [PATCH 060/336] preen --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f2f3169eae..efa9ce0bca 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -667,7 +667,7 @@ jobs: with: name: memory-analysis-target path: memory-analysis-target.json - if-no-files-found: error + if-no-files-found: warn retention-days: 1 memory-impact-pr-branch: From 0475ec55334b57981fdd9dd87cef2196bcb20e4c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 17:01:20 -1000 Subject: [PATCH 061/336] preen --- script/ci_memory_impact_comment.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 60676949e8..0be783ab3d 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -73,10 +73,12 @@ def format_change(before: int, after: int) -> str: # Format delta with sign and always show in bytes for precision if delta > 0: delta_str = f"+{delta:,} bytes" - emoji = "📈" + # Use 🚨 for significant increases (>1%), 🔸 for smaller ones + emoji = "🚨" if abs(percentage) > 1.0 else "🔸" elif delta < 0: delta_str = f"{delta:,} bytes" - emoji = "📉" + # Use 🎉 for significant reductions (>1%), ✅ for smaller ones + emoji = "🎉" if abs(percentage) > 1.0 else "✅" else: delta_str = "+0 bytes" emoji = "➡️" From 8fd43f1d96a80311c02d3acc5086e0dfd3211034 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 17:09:05 -1000 Subject: [PATCH 062/336] tweak --- script/ci_memory_impact_comment.py | 36 ++++++++++++++++++++++-------- 1 file changed, 27 insertions(+), 9 deletions(-) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 0be783ab3d..2d36ffa405 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -22,6 +22,10 @@ sys.path.insert(0, str(Path(__file__).parent.parent)) # Comment marker to identify our memory impact comments COMMENT_MARKER = "" +# Thresholds for emoji significance indicators (percentage) +OVERALL_CHANGE_THRESHOLD = 1.0 # Overall RAM/Flash changes +COMPONENT_CHANGE_THRESHOLD = 3.0 # Component breakdown changes + def load_analysis_json(json_path: str) -> dict | None: """Load memory analysis results from JSON file. 
@@ -57,12 +61,16 @@ def format_bytes(bytes_value: int) -> str: return f"{bytes_value:,} bytes" -def format_change(before: int, after: int) -> str: +def format_change( + before: int, after: int, use_trend_icons: bool = False, threshold: float = 1.0 +) -> str: """Format memory change with delta and percentage. Args: before: Memory usage before change (in bytes) after: Memory usage after change (in bytes) + use_trend_icons: If True, use 📈/📉 chart icons; if False, use status emojis + threshold: Percentage threshold for "significant" change (default 1.0%) Returns: Formatted string with delta and percentage @@ -73,12 +81,18 @@ def format_change(before: int, after: int) -> str: # Format delta with sign and always show in bytes for precision if delta > 0: delta_str = f"+{delta:,} bytes" - # Use 🚨 for significant increases (>1%), 🔸 for smaller ones - emoji = "🚨" if abs(percentage) > 1.0 else "🔸" + if use_trend_icons: + emoji = "📈" + else: + # Use 🚨 for significant increases, 🔸 for smaller ones + emoji = "🚨" if abs(percentage) > threshold else "🔸" elif delta < 0: delta_str = f"{delta:,} bytes" - # Use 🎉 for significant reductions (>1%), ✅ for smaller ones - emoji = "🎉" if abs(percentage) > 1.0 else "✅" + if use_trend_icons: + emoji = "📉" + else: + # Use 🎉 for significant reductions, ✅ for smaller ones + emoji = "🎉" if abs(percentage) > threshold else "✅" else: delta_str = "+0 bytes" emoji = "➡️" @@ -173,7 +187,7 @@ def create_symbol_changes_table( for symbol, target_size, pr_size, delta in changed_symbols[:30]: target_str = format_bytes(target_size) pr_str = format_bytes(pr_size) - change_str = format_change(target_size, pr_size) + change_str = format_change(target_size, pr_size, use_trend_icons=True) display_symbol = format_symbol_for_display(symbol) lines.append( f"| {display_symbol} | {target_str} | {pr_str} | {change_str} |" @@ -287,7 +301,9 @@ def create_detailed_breakdown_table( for comp, target_flash, pr_flash, delta in changed_components[:20]: target_str = format_bytes(target_flash) pr_str = format_bytes(pr_flash) - change_str = format_change(target_flash, pr_flash) + change_str = format_change( + target_flash, pr_flash, threshold=COMPONENT_CHANGE_THRESHOLD + ) lines.append(f"| `{comp}` | {target_str} | {pr_str} | {change_str} |") if len(changed_components) > 20: @@ -331,8 +347,10 @@ def create_comment_body( Returns: Formatted comment body """ - ram_change = format_change(target_ram, pr_ram) - flash_change = format_change(target_flash, pr_flash) + ram_change = format_change(target_ram, pr_ram, threshold=OVERALL_CHANGE_THRESHOLD) + flash_change = format_change( + target_flash, pr_flash, threshold=OVERALL_CHANGE_THRESHOLD + ) # Use provided analysis data if available component_breakdown = "" From d98b00f56ddffc2e318e9830952712c1cbfd33f5 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 17:10:28 -1000 Subject: [PATCH 063/336] tweak --- script/ci_memory_impact_comment.py | 33 ++++++++++++++++-------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 2d36ffa405..f381df0ff6 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -61,16 +61,15 @@ def format_bytes(bytes_value: int) -> str: return f"{bytes_value:,} bytes" -def format_change( - before: int, after: int, use_trend_icons: bool = False, threshold: float = 1.0 -) -> str: +def format_change(before: int, after: int, threshold: float | None = None) -> str: """Format memory change with delta and percentage. Args: before: Memory usage before change (in bytes) after: Memory usage after change (in bytes) - use_trend_icons: If True, use 📈/📉 chart icons; if False, use status emojis - threshold: Percentage threshold for "significant" change (default 1.0%) + threshold: Optional percentage threshold for "significant" change. + If provided, adds supplemental emoji (🎉/🚨/🔸/✅) to chart icons. + If None, only shows chart icons (📈/📉/➡️). Returns: Formatted string with delta and percentage @@ -78,21 +77,25 @@ def format_change( delta = after - before percentage = 0.0 if before == 0 else (delta / before) * 100 - # Format delta with sign and always show in bytes for precision + # Always use chart icons to show direction if delta > 0: delta_str = f"+{delta:,} bytes" - if use_trend_icons: - emoji = "📈" + trend_icon = "📈" + # Add supplemental emoji based on threshold if provided + if threshold is not None: + significance = "🚨" if abs(percentage) > threshold else "🔸" + emoji = f"{trend_icon} {significance}" else: - # Use 🚨 for significant increases, 🔸 for smaller ones - emoji = "🚨" if abs(percentage) > threshold else "🔸" + emoji = trend_icon elif delta < 0: delta_str = f"{delta:,} bytes" - if use_trend_icons: - emoji = "📉" + trend_icon = "📉" + # Add supplemental emoji based on threshold if provided + if threshold is not None: + significance = "🎉" if abs(percentage) > threshold else "✅" + emoji = f"{trend_icon} {significance}" else: - # Use 🎉 for significant reductions, ✅ for smaller ones - emoji = "🎉" if abs(percentage) > threshold else "✅" + emoji = trend_icon else: delta_str = "+0 bytes" emoji = "➡️" @@ -187,7 +190,7 @@ def create_symbol_changes_table( for symbol, target_size, pr_size, delta in changed_symbols[:30]: target_str = format_bytes(target_size) pr_str = format_bytes(pr_size) - change_str = format_change(target_size, pr_size, use_trend_icons=True) + change_str = format_change(target_size, pr_size) # Chart icons only display_symbol = format_symbol_for_display(symbol) lines.append( f"| {display_symbol} | {target_str} | {pr_str} | {change_str} |" From cd93f7f55a07ea73a21cf78a620f77e03d4bb4c0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 17:13:24 -1000 Subject: [PATCH 064/336] tweak --- .coveragerc | 1 + 1 file changed, 1 insertion(+) diff --git a/.coveragerc b/.coveragerc index f23592be24..c15e79a31b 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,4 +1,5 @@ [run] omit = esphome/components/* + esphome/analyze_memory/* tests/integration/* From 931e3f80f0b1a70adf1264a75e4d568c90af9d1b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 17:25:03 -1000 Subject: [PATCH 065/336] no memory when tatget branch does not have --- .github/workflows/ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index efa9ce0bca..42f934de9d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -534,6 +534,7 @@ jobs: ram_usage: ${{ steps.extract.outputs.ram_usage }} flash_usage: ${{ steps.extract.outputs.flash_usage }} cache_hit: ${{ steps.cache-memory-analysis.outputs.cache-hit }} + skip: ${{ steps.check-script.outputs.skip }} steps: - name: Check out target branch uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 @@ -735,7 +736,7 @@ jobs: - determine-jobs - memory-impact-target-branch - memory-impact-pr-branch - if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' + if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true' permissions: contents: read pull-requests: write From 5080698c3a7ed5b64429ce5d8b0fbfeddb635c9a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 17:34:16 -1000 Subject: [PATCH 066/336] no memory when tatget branch does not have --- script/ci_memory_impact_comment.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index f381df0ff6..8b0dbb6f58 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -304,9 +304,9 @@ def create_detailed_breakdown_table( for comp, target_flash, pr_flash, delta in changed_components[:20]: target_str = format_bytes(target_flash) pr_str = format_bytes(pr_flash) - change_str = format_change( - target_flash, pr_flash, threshold=COMPONENT_CHANGE_THRESHOLD - ) + # Only apply threshold to ESPHome components, not framework/infrastructure + threshold = COMPONENT_CHANGE_THRESHOLD if comp.startswith("[esphome]") else None + change_str = format_change(target_flash, pr_flash, threshold=threshold) lines.append(f"| `{comp}` | {target_str} | {pr_str} | {change_str} |") if len(changed_components) > 20: From c70937ed01441c97f7c7d8132d05d635ac3a2534 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 17:55:05 -1000 Subject: [PATCH 067/336] dry --- script/analyze_component_buses.py | 14 +------ script/determine-jobs.py | 64 ++++++++++++++----------------- script/helpers.py | 62 +++++++++++++++++++++++++++--- script/list-components.py | 10 ++--- script/split_components_for_ci.py | 10 ++--- script/test_build_components.py | 9 +++-- 6 files changed, 100 insertions(+), 69 deletions(-) diff --git a/script/analyze_component_buses.py b/script/analyze_component_buses.py index d0882e22e9..78f5ca3344 100755 --- a/script/analyze_component_buses.py +++ b/script/analyze_component_buses.py @@ -34,6 +34,8 @@ from typing import Any # Add esphome to path sys.path.insert(0, str(Path(__file__).parent.parent)) +from helpers import BASE_BUS_COMPONENTS + from esphome import yaml_util from esphome.config_helpers import Extend, Remove @@ -67,18 +69,6 @@ NO_BUSES_SIGNATURE = "no_buses" # Isolated components have unique signatures and cannot be merged with others ISOLATED_SIGNATURE_PREFIX = "isolated_" -# Base bus components - these ARE the bus implementations and should not -# be flagged as needing migration since they are the platform/base components -BASE_BUS_COMPONENTS = { - "i2c", - "spi", - "uart", - "modbus", - "canbus", - "remote_transmitter", - "remote_receiver", -} - # Components that must be tested in isolation (not grouped or batched with others) # These have known build issues that prevent grouping # NOTE: This should be kept in sync with both test_build_components and split_components_for_ci.py diff --git a/script/determine-jobs.py b/script/determine-jobs.py index 8e2c239fe2..5767ced859 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -38,6 +38,7 @@ Options: from __future__ import annotations import argparse +from collections import Counter from enum import StrEnum from functools import cache import json @@ -48,11 +49,13 @@ import sys from typing import Any from helpers import ( + BASE_BUS_COMPONENTS, CPP_FILE_EXTENSIONS, - ESPHOME_COMPONENTS_PATH, PYTHON_FILE_EXTENSIONS, changed_files, get_all_dependencies, + get_component_from_path, + get_component_test_files, get_components_from_integration_fixtures, parse_test_filename, root_path, @@ -142,12 +145,9 @@ def should_run_integration_tests(branch: str | None = None) -> bool: # Check if any required components changed for file in files: - if file.startswith(ESPHOME_COMPONENTS_PATH): - parts = file.split("/") - if len(parts) >= 3: - component = parts[2] - if component in all_required_components: - return True + component = get_component_from_path(file) + if component and component in all_required_components: + return True return False @@ -261,10 +261,7 @@ def _component_has_tests(component: str) -> bool: Returns: True if the component has test YAML files """ - tests_dir = Path(root_path) / "tests" / "components" / component - if not tests_dir.exists(): - return False - return any(tests_dir.glob("test.*.yaml")) + return bool(get_component_test_files(component)) def detect_memory_impact_config( @@ -291,17 +288,15 @@ def detect_memory_impact_config( files = changed_files(branch) # Find all changed components (excluding core and base bus components) - changed_component_set = set() + changed_component_set: set[str] = set() has_core_changes = False for file in files: - if file.startswith(ESPHOME_COMPONENTS_PATH): - parts = file.split("/") - if len(parts) >= 3: - component = parts[2] - # Skip base bus components as they're used across many builds - if component not in ["i2c", "spi", "uart", "modbus", 
"canbus"]: - changed_component_set.add(component) + component = get_component_from_path(file) + if component: + # Skip base bus components as they're used across many builds + if component not in BASE_BUS_COMPONENTS: + changed_component_set.add(component) elif file.startswith("esphome/"): # Core ESPHome files changed (not component-specific) has_core_changes = True @@ -321,25 +316,24 @@ def detect_memory_impact_config( return {"should_run": "false"} # Find components that have tests and collect their supported platforms - components_with_tests = [] - component_platforms_map = {} # Track which platforms each component supports + components_with_tests: list[str] = [] + component_platforms_map: dict[ + str, set[Platform] + ] = {} # Track which platforms each component supports for component in sorted(changed_component_set): - tests_dir = Path(root_path) / "tests" / "components" / component - if not tests_dir.exists(): - continue - # Look for test files on preferred platforms - test_files = list(tests_dir.glob("test.*.yaml")) + test_files = get_component_test_files(component) if not test_files: continue # Check if component has tests for any preferred platform - available_platforms = [] - for test_file in test_files: - _, platform = parse_test_filename(test_file) - if platform != "all" and platform in MEMORY_IMPACT_PLATFORM_PREFERENCE: - available_platforms.append(platform) + available_platforms = [ + platform + for test_file in test_files + if (platform := parse_test_filename(test_file)[1]) != "all" + and platform in MEMORY_IMPACT_PLATFORM_PREFERENCE + ] if not available_platforms: continue @@ -367,10 +361,10 @@ def detect_memory_impact_config( else: # No common platform - pick the most commonly supported platform # This allows testing components individually even if they can't be merged - platform_counts = {} - for platforms in component_platforms_map.values(): - for p in platforms: - platform_counts[p] = platform_counts.get(p, 0) + 1 + # Count how many components support each platform + platform_counts = Counter( + p for platforms in component_platforms_map.values() for p in platforms + ) # Pick the platform supported by most components, preferring earlier in MEMORY_IMPACT_PLATFORM_PREFERENCE platform = max( platform_counts.keys(), diff --git a/script/helpers.py b/script/helpers.py index 85e568dcf8..edde3d78af 100644 --- a/script/helpers.py +++ b/script/helpers.py @@ -29,6 +29,18 @@ YAML_FILE_EXTENSIONS = (".yaml", ".yml") # Component path prefix ESPHOME_COMPONENTS_PATH = "esphome/components/" +# Base bus components - these ARE the bus implementations and should not +# be flagged as needing migration since they are the platform/base components +BASE_BUS_COMPONENTS = { + "i2c", + "spi", + "uart", + "modbus", + "canbus", + "remote_transmitter", + "remote_receiver", +} + def parse_list_components_output(output: str) -> list[str]: """Parse the output from list-components.py script. @@ -63,6 +75,48 @@ def parse_test_filename(test_file: Path) -> tuple[str, str]: return parts[0], "all" +def get_component_from_path(file_path: str) -> str | None: + """Extract component name from a file path. 
+ + Args: + file_path: Path to a file (e.g., "esphome/components/wifi/wifi.cpp") + + Returns: + Component name if path is in components directory, None otherwise + """ + if not file_path.startswith(ESPHOME_COMPONENTS_PATH): + return None + parts = file_path.split("/") + if len(parts) >= 3: + return parts[2] + return None + + +def get_component_test_files( + component: str, *, all_variants: bool = False +) -> list[Path]: + """Get test files for a component. + + Args: + component: Component name (e.g., "wifi") + all_variants: If True, returns all test files including variants (test-*.yaml). + If False, returns only base test files (test.*.yaml). + Default is False. + + Returns: + List of test file paths for the component, or empty list if none exist + """ + tests_dir = Path(root_path) / "tests" / "components" / component + if not tests_dir.exists(): + return [] + + if all_variants: + # Match both test.*.yaml and test-*.yaml patterns + return list(tests_dir.glob("test[.-]*.yaml")) + # Match only test.*.yaml (base tests) + return list(tests_dir.glob("test.*.yaml")) + + def styled(color: str | tuple[str, ...], msg: str, reset: bool = True) -> str: prefix = "".join(color) if isinstance(color, tuple) else color suffix = colorama.Style.RESET_ALL if reset else "" @@ -331,11 +385,9 @@ def _filter_changed_ci(files: list[str]) -> list[str]: # because changes in one file can affect other files in the same component. filtered_files = [] for f in files: - if f.startswith(ESPHOME_COMPONENTS_PATH): - # Check if file belongs to any of the changed components - parts = f.split("/") - if len(parts) >= 3 and parts[2] in component_set: - filtered_files.append(f) + component = get_component_from_path(f) + if component and component in component_set: + filtered_files.append(f) return filtered_files diff --git a/script/list-components.py b/script/list-components.py index 9abb2bc345..11533ceb30 100755 --- a/script/list-components.py +++ b/script/list-components.py @@ -4,7 +4,7 @@ from collections.abc import Callable from pathlib import Path import sys -from helpers import changed_files, git_ls_files +from helpers import changed_files, get_component_from_path, git_ls_files from esphome.const import ( KEY_CORE, @@ -30,11 +30,9 @@ def get_all_component_files() -> list[str]: def extract_component_names_array_from_files_array(files): components = [] for file in files: - file_parts = file.split("/") - if len(file_parts) >= 4: - component_name = file_parts[2] - if component_name not in components: - components.append(component_name) + component_name = get_component_from_path(file) + if component_name and component_name not in components: + components.append(component_name) return components diff --git a/script/split_components_for_ci.py b/script/split_components_for_ci.py index dff46d3619..6ba2598eda 100755 --- a/script/split_components_for_ci.py +++ b/script/split_components_for_ci.py @@ -28,6 +28,7 @@ from script.analyze_component_buses import ( create_grouping_signature, merge_compatible_bus_groups, ) +from script.helpers import get_component_test_files # Weighting for batch creation # Isolated components can't be grouped/merged, so they count as 10x @@ -45,17 +46,12 @@ def has_test_files(component_name: str, tests_dir: Path) -> bool: Args: component_name: Name of the component - tests_dir: Path to tests/components directory + tests_dir: Path to tests/components directory (unused, kept for compatibility) Returns: True if the component has test.*.yaml files """ - component_dir = tests_dir / component_name - if not 
component_dir.exists() or not component_dir.is_dir(): - return False - - # Check for test.*.yaml files - return any(component_dir.glob("test.*.yaml")) + return bool(get_component_test_files(component_name)) def create_intelligent_batches( diff --git a/script/test_build_components.py b/script/test_build_components.py index 07f2680799..77c97a8773 100755 --- a/script/test_build_components.py +++ b/script/test_build_components.py @@ -39,6 +39,7 @@ from script.analyze_component_buses import ( merge_compatible_bus_groups, uses_local_file_references, ) +from script.helpers import get_component_test_files from script.merge_component_configs import merge_component_configs @@ -100,10 +101,10 @@ def find_component_tests( if not comp_dir.is_dir(): continue - # Find test files - either base only (test.*.yaml) or all (test[.-]*.yaml) - pattern = "test.*.yaml" if base_only else "test[.-]*.yaml" - for test_file in comp_dir.glob(pattern): - component_tests[comp_dir.name].append(test_file) + # Get test files using helper function + test_files = get_component_test_files(comp_dir.name, all_variants=not base_only) + if test_files: + component_tests[comp_dir.name] = test_files return dict(component_tests) From b95999aca7cc2d39ff6846627b8a0936f409465d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 17:55:37 -1000 Subject: [PATCH 068/336] Update esphome/analyze_memory/__init__.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- esphome/analyze_memory/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 5ef9eab526..74299d4e95 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -295,7 +295,7 @@ class MemoryAnalyzer: cppfilt_cmd = "c++filt" _LOGGER.warning("Demangling %d symbols", len(symbols)) - _LOGGER.warning("objdump_path = %s", self.objdump_path) + _LOGGER.debug("objdump_path = %s", self.objdump_path) # Check if we have a toolchain-specific c++filt if self.objdump_path and self.objdump_path != "objdump": From 9a4288d81a02e7484e393f97a89fab856ae6e4e9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 17:56:41 -1000 Subject: [PATCH 069/336] Update script/determine-jobs.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- script/determine-jobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index 5767ced859..26e91edbe1 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -26,7 +26,7 @@ The CI workflow uses this information to: - Skip or run Python linters (ruff, flake8, pylint, pyupgrade) - Determine which components to test individually - Decide how to split component tests (if there are many) -- Run memory impact analysis when exactly one component changes +- Run memory impact analysis whenever there are changed components (merged config), and also for core-only changes Usage: python script/determine-jobs.py [-b BRANCH] From a96cc5e6f20a8b7205a48ea38836fb22ff012239 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 17:57:33 -1000 Subject: [PATCH 070/336] Update esphome/analyze_memory/__init__.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- esphome/analyze_memory/__init__.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 74299d4e95..3e85c4d869 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -294,24 +294,24 @@ class MemoryAnalyzer: # Try to find the appropriate c++filt for the platform cppfilt_cmd = "c++filt" - _LOGGER.warning("Demangling %d symbols", len(symbols)) + _LOGGER.info("Demangling %d symbols", len(symbols)) _LOGGER.debug("objdump_path = %s", self.objdump_path) # Check if we have a toolchain-specific c++filt if self.objdump_path and self.objdump_path != "objdump": # Replace objdump with c++filt in the path potential_cppfilt = self.objdump_path.replace("objdump", "c++filt") - _LOGGER.warning("Checking for toolchain c++filt at: %s", potential_cppfilt) + _LOGGER.info("Checking for toolchain c++filt at: %s", potential_cppfilt) if Path(potential_cppfilt).exists(): cppfilt_cmd = potential_cppfilt - _LOGGER.warning("✓ Using toolchain c++filt: %s", cppfilt_cmd) + _LOGGER.info("✓ Using toolchain c++filt: %s", cppfilt_cmd) else: - _LOGGER.warning( + _LOGGER.info( "✗ Toolchain c++filt not found at %s, using system c++filt", potential_cppfilt, ) else: - _LOGGER.warning( + _LOGGER.info( "✗ Using system c++filt (objdump_path=%s)", self.objdump_path ) From 0b09e506854decd24b44a6ee77e2831f5a193859 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 17:57:42 -1000 Subject: [PATCH 071/336] preen --- esphome/analyze_memory/cli.py | 4 ++-- script/determine-jobs.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index 7b004353ec..bcf9f45de9 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -183,9 +183,9 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%" ) - # Top 10 largest core symbols + # Top 15 largest core symbols lines.append("") - lines.append("Top 10 Largest [esphome]core Symbols:") + lines.append("Top 15 Largest [esphome]core Symbols:") sorted_core_symbols = sorted( self._esphome_core_symbols, key=lambda x: x[2], reverse=True ) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index 5767ced859..bcc357d953 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -13,9 +13,9 @@ what files have changed. 
It outputs JSON with the following structure: "component_test_count": 5, "memory_impact": { "should_run": "true/false", - "component": "component_name", - "test_file": "test.esp32-idf.yaml", - "platform": "esp32-idf" + "components": ["component1", "component2", ...], + "platform": "esp32-idf", + "use_merged_config": "true" } } @@ -26,7 +26,7 @@ The CI workflow uses this information to: - Skip or run Python linters (ruff, flake8, pylint, pyupgrade) - Determine which components to test individually - Decide how to split component tests (if there are many) -- Run memory impact analysis when exactly one component changes +- Run memory impact analysis when components change Usage: python script/determine-jobs.py [-b BRANCH] From bbd636a8cc7fecd046ef16cc919a71bf37e3db97 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci-lite[bot]" <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com> Date: Sat, 18 Oct 2025 03:59:23 +0000 Subject: [PATCH 072/336] [pre-commit.ci lite] apply automatic fixes --- esphome/analyze_memory/__init__.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 3e85c4d869..b5d574807e 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -311,9 +311,7 @@ class MemoryAnalyzer: potential_cppfilt, ) else: - _LOGGER.info( - "✗ Using system c++filt (objdump_path=%s)", self.objdump_path - ) + _LOGGER.info("✗ Using system c++filt (objdump_path=%s)", self.objdump_path) # Strip GCC optimization suffixes and prefixes before demangling # Suffixes like $isra$0, $part$0, $constprop$0 confuse c++filt From 9cf1fd24fd5e1a91bbc5fe49ed941b60dce5eb49 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 18:06:13 -1000 Subject: [PATCH 073/336] preen --- esphome/analyze_memory/__init__.py | 42 +++---- esphome/analyze_memory/cli.py | 2 +- script/ci_memory_impact_comment.py | 196 +++++++++++++---------------- script/ci_memory_impact_extract.py | 15 +-- 4 files changed, 116 insertions(+), 139 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 3e85c4d869..942caabe70 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -77,7 +77,7 @@ class MemoryAnalyzer: readelf_path: str | None = None, external_components: set[str] | None = None, idedata: "IDEData | None" = None, - ): + ) -> None: """Initialize memory analyzer. 
Args: @@ -311,15 +311,13 @@ class MemoryAnalyzer: potential_cppfilt, ) else: - _LOGGER.info( - "✗ Using system c++filt (objdump_path=%s)", self.objdump_path - ) + _LOGGER.info("✗ Using system c++filt (objdump_path=%s)", self.objdump_path) # Strip GCC optimization suffixes and prefixes before demangling # Suffixes like $isra$0, $part$0, $constprop$0 confuse c++filt # Prefixes like _GLOBAL__sub_I_ need to be removed and tracked - symbols_stripped = [] - symbols_prefixes = [] # Track removed prefixes + symbols_stripped: list[str] = [] + symbols_prefixes: list[str] = [] # Track removed prefixes for symbol in symbols: # Remove GCC optimization markers stripped = re.sub(r"\$(?:isra|part|constprop)\$\d+", "", symbol) @@ -327,12 +325,11 @@ class MemoryAnalyzer: # Handle GCC global constructor/initializer prefixes # _GLOBAL__sub_I_ -> extract for demangling prefix = "" - if stripped.startswith("_GLOBAL__sub_I_"): - prefix = "_GLOBAL__sub_I_" - stripped = stripped[len(prefix) :] - elif stripped.startswith("_GLOBAL__sub_D_"): - prefix = "_GLOBAL__sub_D_" - stripped = stripped[len(prefix) :] + for gcc_prefix in _GCC_PREFIX_ANNOTATIONS: + if stripped.startswith(gcc_prefix): + prefix = gcc_prefix + stripped = stripped[len(prefix) :] + break symbols_stripped.append(stripped) symbols_prefixes.append(prefix) @@ -405,17 +402,18 @@ class MemoryAnalyzer: if stripped == demangled and stripped.startswith("_Z"): failed_count += 1 if failed_count <= 5: # Only log first 5 failures - _LOGGER.warning("Failed to demangle: %s", original[:100]) + _LOGGER.warning("Failed to demangle: %s", original) - if failed_count > 0: - _LOGGER.warning( - "Failed to demangle %d/%d symbols using %s", - failed_count, - len(symbols), - cppfilt_cmd, - ) - else: - _LOGGER.warning("Successfully demangled all %d symbols", len(symbols)) + if failed_count == 0: + _LOGGER.info("Successfully demangled all %d symbols", len(symbols)) + return + + _LOGGER.warning( + "Failed to demangle %d/%d symbols using %s", + failed_count, + len(symbols), + cppfilt_cmd, + ) @staticmethod def _restore_symbol_prefix(prefix: str, stripped: str, demangled: str) -> str: diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index bcf9f45de9..a2366430dd 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -83,7 +83,7 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): total_ram = sum(c.ram_total for _, c in components) # Build report - lines = [] + lines: list[str] = [] lines.append("=" * self.TABLE_WIDTH) lines.append("Component Memory Analysis".center(self.TABLE_WIDTH)) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 8b0dbb6f58..d177b101a8 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -411,137 +411,115 @@ def find_existing_comment(pr_number: str) -> str | None: Returns: Comment numeric ID if found, None otherwise + + Raises: + subprocess.CalledProcessError: If gh command fails """ - try: - print( - f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr - ) + print(f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr) - # Use gh api to get comments directly - this returns the numeric id field - result = subprocess.run( - [ - "gh", - "api", - f"/repos/{{owner}}/{{repo}}/issues/{pr_number}/comments", - "--jq", - ".[] | {id, body}", - ], - capture_output=True, - text=True, - check=True, - ) + # Use gh api to get comments directly - this returns the numeric id field + result = subprocess.run( + [ + 
"gh", + "api", + f"/repos/{{owner}}/{{repo}}/issues/{pr_number}/comments", + "--jq", + ".[] | {id, body}", + ], + capture_output=True, + text=True, + check=True, + ) - print( - f"DEBUG: gh api comments output (first 500 chars):\n{result.stdout[:500]}", - file=sys.stderr, - ) + print( + f"DEBUG: gh api comments output (first 500 chars):\n{result.stdout[:500]}", + file=sys.stderr, + ) - # Parse comments and look for our marker - comment_count = 0 - for line in result.stdout.strip().split("\n"): - if not line: - continue + # Parse comments and look for our marker + comment_count = 0 + for line in result.stdout.strip().split("\n"): + if not line: + continue - try: - comment = json.loads(line) - comment_count += 1 - comment_id = comment.get("id") + try: + comment = json.loads(line) + comment_count += 1 + comment_id = comment.get("id") + print( + f"DEBUG: Checking comment {comment_count}: id={comment_id}", + file=sys.stderr, + ) + + body = comment.get("body", "") + if COMMENT_MARKER in body: print( - f"DEBUG: Checking comment {comment_count}: id={comment_id}", + f"DEBUG: Found existing comment with id={comment_id}", file=sys.stderr, ) + # Return the numeric id + return str(comment_id) + print("DEBUG: Comment does not contain marker", file=sys.stderr) + except json.JSONDecodeError as e: + print(f"DEBUG: JSON decode error: {e}", file=sys.stderr) + continue - body = comment.get("body", "") - if COMMENT_MARKER in body: - print( - f"DEBUG: Found existing comment with id={comment_id}", - file=sys.stderr, - ) - # Return the numeric id - return str(comment_id) - print("DEBUG: Comment does not contain marker", file=sys.stderr) - except json.JSONDecodeError as e: - print(f"DEBUG: JSON decode error: {e}", file=sys.stderr) - continue - - print( - f"DEBUG: No existing comment found (checked {comment_count} comments)", - file=sys.stderr, - ) - return None - - except subprocess.CalledProcessError as e: - print(f"Error finding existing comment: {e}", file=sys.stderr) - if e.stderr: - print(f"stderr: {e.stderr.decode()}", file=sys.stderr) - return None + print( + f"DEBUG: No existing comment found (checked {comment_count} comments)", + file=sys.stderr, + ) + return None -def post_or_update_comment(pr_number: str, comment_body: str) -> bool: +def post_or_update_comment(pr_number: str, comment_body: str) -> None: """Post a new comment or update existing one. 
Args: pr_number: PR number comment_body: Comment body text - Returns: - True if successful, False otherwise + Raises: + subprocess.CalledProcessError: If gh command fails """ # Look for existing comment existing_comment_id = find_existing_comment(pr_number) - try: - if existing_comment_id and existing_comment_id != "None": - # Update existing comment - print( - f"DEBUG: Updating existing comment {existing_comment_id}", - file=sys.stderr, - ) - result = subprocess.run( - [ - "gh", - "api", - f"/repos/{{owner}}/{{repo}}/issues/comments/{existing_comment_id}", - "-X", - "PATCH", - "-f", - f"body={comment_body}", - ], - check=True, - capture_output=True, - text=True, - ) - print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr) - else: - # Post new comment - print( - f"DEBUG: Posting new comment (existing_comment_id={existing_comment_id})", - file=sys.stderr, - ) - result = subprocess.run( - ["gh", "pr", "comment", pr_number, "--body", comment_body], - check=True, - capture_output=True, - text=True, - ) - print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr) + if existing_comment_id and existing_comment_id != "None": + # Update existing comment + print( + f"DEBUG: Updating existing comment {existing_comment_id}", + file=sys.stderr, + ) + result = subprocess.run( + [ + "gh", + "api", + f"/repos/{{owner}}/{{repo}}/issues/comments/{existing_comment_id}", + "-X", + "PATCH", + "-f", + f"body={comment_body}", + ], + check=True, + capture_output=True, + text=True, + ) + print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr) + else: + # Post new comment + print( + f"DEBUG: Posting new comment (existing_comment_id={existing_comment_id})", + file=sys.stderr, + ) + result = subprocess.run( + ["gh", "pr", "comment", pr_number, "--body", comment_body], + check=True, + capture_output=True, + text=True, + ) + print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr) - print("Comment posted/updated successfully", file=sys.stderr) - return True - - except subprocess.CalledProcessError as e: - print(f"Error posting/updating comment: {e}", file=sys.stderr) - if e.stderr: - print( - f"stderr: {e.stderr.decode() if isinstance(e.stderr, bytes) else e.stderr}", - file=sys.stderr, - ) - if e.stdout: - print( - f"stdout: {e.stdout.decode() if isinstance(e.stdout, bytes) else e.stdout}", - file=sys.stderr, - ) - return False + print("Comment posted/updated successfully", file=sys.stderr) def main() -> int: diff --git a/script/ci_memory_impact_extract.py b/script/ci_memory_impact_extract.py index 76632ebc33..5522d522f0 100755 --- a/script/ci_memory_impact_extract.py +++ b/script/ci_memory_impact_extract.py @@ -27,6 +27,11 @@ sys.path.insert(0, str(Path(__file__).parent.parent)) # pylint: disable=wrong-import-position from script.ci_helpers import write_github_output +# Regex patterns for extracting memory usage from PlatformIO output +_RAM_PATTERN = re.compile(r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes") +_FLASH_PATTERN = re.compile(r"Flash:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes") +_BUILD_PATH_PATTERN = re.compile(r"Build path: (.+)") + def extract_from_compile_output( output_text: str, @@ -42,7 +47,7 @@ def extract_from_compile_output( Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes) Also extracts build directory from lines like: - INFO Deleting /path/to/build/.esphome/build/componenttestesp8266ard/.pioenvs + INFO Compiling app... 
Build path: /path/to/build Args: output_text: Compile output text (may contain multiple builds) @@ -51,12 +56,8 @@ def extract_from_compile_output( Tuple of (total_ram_bytes, total_flash_bytes, build_dir) or (None, None, None) if not found """ # Find all RAM and Flash matches (may be multiple builds) - ram_matches = re.findall( - r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", output_text - ) - flash_matches = re.findall( - r"Flash:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", output_text - ) + ram_matches = _RAM_PATTERN.findall(output_text) + flash_matches = _FLASH_PATTERN.findall(output_text) if not ram_matches or not flash_matches: return None, None, None From 0b077bdfc62c2d2923356ecec37ad27821b610aa Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 18:08:52 -1000 Subject: [PATCH 074/336] preen --- script/ci_memory_impact_extract.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/script/ci_memory_impact_extract.py b/script/ci_memory_impact_extract.py index 5522d522f0..17ac788ae3 100755 --- a/script/ci_memory_impact_extract.py +++ b/script/ci_memory_impact_extract.py @@ -70,7 +70,7 @@ def extract_from_compile_output( # Look for: INFO Compiling app... Build path: /path/to/build # Note: Multiple builds reuse the same build path (each overwrites the previous) build_dir = None - if match := re.search(r"Build path: (.+)", output_text): + if match := _BUILD_PATH_PATTERN.search(output_text): build_dir = match.group(1).strip() return total_ram, total_flash, build_dir @@ -210,11 +210,7 @@ def main() -> int: return 1 # Count how many builds were found - num_builds = len( - re.findall( - r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes", compile_output - ) - ) + num_builds = len(_RAM_PATTERN.findall(compile_output)) if num_builds > 1: print( From 07ad32968e585919374e9c7c891bdb355501a9f9 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 18:15:46 -1000 Subject: [PATCH 075/336] template all the things --- script/ci_memory_impact_comment.py | 296 +++++++----------- .../ci_memory_impact_comment_template.j2 | 27 ++ .../ci_memory_impact_component_breakdown.j2 | 15 + script/templates/ci_memory_impact_macros.j2 | 8 + .../ci_memory_impact_symbol_changes.j2 | 51 +++ 5 files changed, 216 insertions(+), 181 deletions(-) create mode 100644 script/templates/ci_memory_impact_comment_template.j2 create mode 100644 script/templates/ci_memory_impact_component_breakdown.j2 create mode 100644 script/templates/ci_memory_impact_macros.j2 create mode 100644 script/templates/ci_memory_impact_symbol_changes.j2 diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index d177b101a8..961c304e40 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -14,6 +14,8 @@ from pathlib import Path import subprocess import sys +from jinja2 import Environment, FileSystemLoader + # Add esphome to path for analyze_memory import sys.path.insert(0, str(Path(__file__).parent.parent)) @@ -26,6 +28,22 @@ COMMENT_MARKER = "" OVERALL_CHANGE_THRESHOLD = 1.0 # Overall RAM/Flash changes COMPONENT_CHANGE_THRESHOLD = 3.0 # Component breakdown changes +# Display limits for tables +MAX_COMPONENT_BREAKDOWN_ROWS = 20 # Maximum components to show in breakdown table +MAX_CHANGED_SYMBOLS_ROWS = 30 # Maximum changed symbols to show +MAX_NEW_SYMBOLS_ROWS = 15 # Maximum new symbols to show +MAX_REMOVED_SYMBOLS_ROWS = 15 # Maximum removed symbols to show + +# Symbol display formatting +SYMBOL_DISPLAY_MAX_LENGTH = 100 # Max length before using
tag +SYMBOL_DISPLAY_TRUNCATE_LENGTH = 97 # Length to truncate in summary + +# Component change noise threshold +COMPONENT_CHANGE_NOISE_THRESHOLD = 2 # Ignore component changes ≤ this many bytes + +# Template directory +TEMPLATE_DIR = Path(__file__).parent / "templates" + def load_analysis_json(json_path: str) -> dict | None: """Load memory analysis results from JSON file. @@ -111,35 +129,20 @@ def format_change(before: int, after: int, threshold: float | None = None) -> st return f"{emoji} {delta_str} ({pct_str})" -def format_symbol_for_display(symbol: str) -> str: - """Format a symbol name for display in markdown table. - - Args: - symbol: Symbol name to format - - Returns: - Formatted symbol with backticks or HTML details tag for long names - """ - if len(symbol) <= 100: - return f"`{symbol}`" - # Use HTML details for very long symbols (no backticks inside HTML) - return f"
{symbol[:97]}...{symbol}
" - - -def create_symbol_changes_table( +def prepare_symbol_changes_data( target_symbols: dict | None, pr_symbols: dict | None -) -> str: - """Create a markdown table showing symbols that changed size. +) -> dict | None: + """Prepare symbol changes data for template rendering. Args: target_symbols: Symbol name to size mapping for target branch pr_symbols: Symbol name to size mapping for PR branch Returns: - Formatted markdown table + Dictionary with changed, new, and removed symbols, or None if no changes """ if not target_symbols or not pr_symbols: - return "" + return None # Find all symbols that exist in both branches or only in one all_symbols = set(target_symbols.keys()) | set(pr_symbols.keys()) @@ -165,113 +168,39 @@ def create_symbol_changes_table( changed_symbols.append((symbol, target_size, pr_size, delta)) if not changed_symbols and not new_symbols and not removed_symbols: - return "" + return None - lines = [ - "", - "
", - "🔍 Symbol-Level Changes (click to expand)", - "", - ] + # Sort by size/delta + changed_symbols.sort(key=lambda x: abs(x[3]), reverse=True) + new_symbols.sort(key=lambda x: x[1], reverse=True) + removed_symbols.sort(key=lambda x: x[1], reverse=True) - # Show changed symbols (sorted by absolute delta) - if changed_symbols: - changed_symbols.sort(key=lambda x: abs(x[3]), reverse=True) - lines.extend( - [ - "### Changed Symbols", - "", - "| Symbol | Target Size | PR Size | Change |", - "|--------|-------------|---------|--------|", - ] - ) - - # Show top 30 changes - for symbol, target_size, pr_size, delta in changed_symbols[:30]: - target_str = format_bytes(target_size) - pr_str = format_bytes(pr_size) - change_str = format_change(target_size, pr_size) # Chart icons only - display_symbol = format_symbol_for_display(symbol) - lines.append( - f"| {display_symbol} | {target_str} | {pr_str} | {change_str} |" - ) - - if len(changed_symbols) > 30: - lines.append( - f"| ... | ... | ... | *({len(changed_symbols) - 30} more changed symbols not shown)* |" - ) - lines.append("") - - # Show new symbols - if new_symbols: - new_symbols.sort(key=lambda x: x[1], reverse=True) - lines.extend( - [ - "### New Symbols (top 15)", - "", - "| Symbol | Size |", - "|--------|------|", - ] - ) - - for symbol, size in new_symbols[:15]: - display_symbol = format_symbol_for_display(symbol) - lines.append(f"| {display_symbol} | {format_bytes(size)} |") - - if len(new_symbols) > 15: - total_new_size = sum(s[1] for s in new_symbols) - lines.append( - f"| *{len(new_symbols) - 15} more new symbols...* | *Total: {format_bytes(total_new_size)}* |" - ) - lines.append("") - - # Show removed symbols - if removed_symbols: - removed_symbols.sort(key=lambda x: x[1], reverse=True) - lines.extend( - [ - "### Removed Symbols (top 15)", - "", - "| Symbol | Size |", - "|--------|------|", - ] - ) - - for symbol, size in removed_symbols[:15]: - display_symbol = format_symbol_for_display(symbol) - lines.append(f"| {display_symbol} | {format_bytes(size)} |") - - if len(removed_symbols) > 15: - total_removed_size = sum(s[1] for s in removed_symbols) - lines.append( - f"| *{len(removed_symbols) - 15} more removed symbols...* | *Total: {format_bytes(total_removed_size)}* |" - ) - lines.append("") - - lines.extend(["
", ""]) - - return "\n".join(lines) + return { + "changed_symbols": changed_symbols, + "new_symbols": new_symbols, + "removed_symbols": removed_symbols, + } -def create_detailed_breakdown_table( +def prepare_component_breakdown_data( target_analysis: dict | None, pr_analysis: dict | None -) -> str: - """Create a markdown table showing detailed memory breakdown by component. +) -> list[tuple[str, int, int, int]] | None: + """Prepare component breakdown data for template rendering. Args: target_analysis: Component memory breakdown for target branch pr_analysis: Component memory breakdown for PR branch Returns: - Formatted markdown table + List of tuples (component, target_flash, pr_flash, delta), or None if no changes """ if not target_analysis or not pr_analysis: - return "" + return None # Combine all components from both analyses all_components = set(target_analysis.keys()) | set(pr_analysis.keys()) - # Filter to components that have changed (ignoring noise ≤2 bytes) + # Filter to components that have changed (ignoring noise) changed_components = [] for comp in all_components: target_mem = target_analysis.get(comp, {}) @@ -280,43 +209,18 @@ def create_detailed_breakdown_table( target_flash = target_mem.get("flash_total", 0) pr_flash = pr_mem.get("flash_total", 0) - # Only include if component has meaningful change (>2 bytes) + # Only include if component has meaningful change (above noise threshold) delta = pr_flash - target_flash - if abs(delta) > 2: + if abs(delta) > COMPONENT_CHANGE_NOISE_THRESHOLD: changed_components.append((comp, target_flash, pr_flash, delta)) if not changed_components: - return "" + return None # Sort by absolute delta (largest changes first) changed_components.sort(key=lambda x: abs(x[3]), reverse=True) - # Build table - limit to top 20 changes - lines = [ - "", - "
", - "📊 Component Memory Breakdown", - "", - "| Component | Target Flash | PR Flash | Change |", - "|-----------|--------------|----------|--------|", - ] - - for comp, target_flash, pr_flash, delta in changed_components[:20]: - target_str = format_bytes(target_flash) - pr_str = format_bytes(pr_flash) - # Only apply threshold to ESPHome components, not framework/infrastructure - threshold = COMPONENT_CHANGE_THRESHOLD if comp.startswith("[esphome]") else None - change_str = format_change(target_flash, pr_flash, threshold=threshold) - lines.append(f"| `{comp}` | {target_str} | {pr_str} | {change_str} |") - - if len(changed_components) > 20: - lines.append( - f"| ... | ... | ... | *({len(changed_components) - 20} more components not shown)* |" - ) - - lines.extend(["", "
", ""]) - - return "\n".join(lines) + return changed_components def create_comment_body( @@ -332,7 +236,7 @@ def create_comment_body( pr_symbols: dict | None = None, target_cache_hit: bool = False, ) -> str: - """Create the comment body with memory impact analysis. + """Create the comment body with memory impact analysis using Jinja2 templates. Args: components: List of component names (merged config) @@ -350,57 +254,87 @@ def create_comment_body( Returns: Formatted comment body """ - ram_change = format_change(target_ram, pr_ram, threshold=OVERALL_CHANGE_THRESHOLD) - flash_change = format_change( - target_flash, pr_flash, threshold=OVERALL_CHANGE_THRESHOLD + # Set up Jinja2 environment + env = Environment( + loader=FileSystemLoader(TEMPLATE_DIR), + trim_blocks=True, + lstrip_blocks=True, ) - # Use provided analysis data if available - component_breakdown = "" - symbol_changes = "" + # Register custom filters + env.filters["format_bytes"] = format_bytes + env.filters["format_change"] = format_change - if target_analysis and pr_analysis: - component_breakdown = create_detailed_breakdown_table( - target_analysis, pr_analysis - ) - - if target_symbols and pr_symbols: - symbol_changes = create_symbol_changes_table(target_symbols, pr_symbols) - else: - print("No ELF files provided, skipping detailed analysis", file=sys.stderr) + # Prepare template context + context = { + "comment_marker": COMMENT_MARKER, + "platform": platform, + "target_ram": format_bytes(target_ram), + "pr_ram": format_bytes(pr_ram), + "target_flash": format_bytes(target_flash), + "pr_flash": format_bytes(pr_flash), + "ram_change": format_change( + target_ram, pr_ram, threshold=OVERALL_CHANGE_THRESHOLD + ), + "flash_change": format_change( + target_flash, pr_flash, threshold=OVERALL_CHANGE_THRESHOLD + ), + "target_cache_hit": target_cache_hit, + "component_change_threshold": COMPONENT_CHANGE_THRESHOLD, + } # Format components list if len(components) == 1: - components_str = f"`{components[0]}`" - config_note = "a representative test configuration" + context["components_str"] = f"`{components[0]}`" + context["config_note"] = "a representative test configuration" else: - components_str = ", ".join(f"`{c}`" for c in sorted(components)) - config_note = f"a merged configuration with {len(components)} components" + context["components_str"] = ", ".join(f"`{c}`" for c in sorted(components)) + context["config_note"] = ( + f"a merged configuration with {len(components)} components" + ) - # Add cache info note if target was cached - cache_note = "" - if target_cache_hit: - cache_note = "\n\n> ⚡ Target branch analysis was loaded from cache (build skipped for faster CI)." 
+ # Prepare component breakdown if available + component_breakdown = "" + if target_analysis and pr_analysis: + changed_components = prepare_component_breakdown_data( + target_analysis, pr_analysis + ) + if changed_components: + template = env.get_template("ci_memory_impact_component_breakdown.j2") + component_breakdown = template.render( + changed_components=changed_components, + format_bytes=format_bytes, + format_change=format_change, + component_change_threshold=COMPONENT_CHANGE_THRESHOLD, + max_rows=MAX_COMPONENT_BREAKDOWN_ROWS, + ) - return f"""{COMMENT_MARKER} -## Memory Impact Analysis + # Prepare symbol changes if available + symbol_changes = "" + if target_symbols and pr_symbols: + symbol_data = prepare_symbol_changes_data(target_symbols, pr_symbols) + if symbol_data: + template = env.get_template("ci_memory_impact_symbol_changes.j2") + symbol_changes = template.render( + **symbol_data, + format_bytes=format_bytes, + format_change=format_change, + max_changed_rows=MAX_CHANGED_SYMBOLS_ROWS, + max_new_rows=MAX_NEW_SYMBOLS_ROWS, + max_removed_rows=MAX_REMOVED_SYMBOLS_ROWS, + symbol_max_length=SYMBOL_DISPLAY_MAX_LENGTH, + symbol_truncate_length=SYMBOL_DISPLAY_TRUNCATE_LENGTH, + ) -**Components:** {components_str} -**Platform:** `{platform}` + if not target_analysis or not pr_analysis: + print("No ELF files provided, skipping detailed analysis", file=sys.stderr) -| Metric | Target Branch | This PR | Change | -|--------|--------------|---------|--------| -| **RAM** | {format_bytes(target_ram)} | {format_bytes(pr_ram)} | {ram_change} | -| **Flash** | {format_bytes(target_flash)} | {format_bytes(pr_flash)} | {flash_change} | -{component_breakdown}{symbol_changes}{cache_note} + context["component_breakdown"] = component_breakdown + context["symbol_changes"] = symbol_changes ---- -> **Note:** This analysis measures **static RAM and Flash usage** only (compile-time allocation). -> **Dynamic memory (heap)** cannot be measured automatically. -> **⚠️ You must test this PR on a real device** to measure free heap and ensure no runtime memory issues. - -*This analysis runs automatically when components change. Memory usage is measured from {config_note}.* -""" + # Render main template + template = env.get_template("ci_memory_impact_comment_template.j2") + return template.render(**context) def find_existing_comment(pr_number: str) -> str | None: @@ -605,9 +539,9 @@ def main() -> int: ) # Post or update comment - success = post_or_update_comment(args.pr_number, comment_body) + post_or_update_comment(args.pr_number, comment_body) - return 0 if success else 1 + return 0 if __name__ == "__main__": diff --git a/script/templates/ci_memory_impact_comment_template.j2 b/script/templates/ci_memory_impact_comment_template.j2 new file mode 100644 index 0000000000..4c8d7f4865 --- /dev/null +++ b/script/templates/ci_memory_impact_comment_template.j2 @@ -0,0 +1,27 @@ +{{ comment_marker }} +## Memory Impact Analysis + +**Components:** {{ components_str }} +**Platform:** `{{ platform }}` + +| Metric | Target Branch | This PR | Change | +|--------|--------------|---------|--------| +| **RAM** | {{ target_ram }} | {{ pr_ram }} | {{ ram_change }} | +| **Flash** | {{ target_flash }} | {{ pr_flash }} | {{ flash_change }} | +{% if component_breakdown %} +{{ component_breakdown }} +{%- endif %} +{%- if symbol_changes %} +{{ symbol_changes }} +{%- endif %} +{%- if target_cache_hit %} + +> ⚡ Target branch analysis was loaded from cache (build skipped for faster CI). 
+{%- endif %} + +--- +> **Note:** This analysis measures **static RAM and Flash usage** only (compile-time allocation). +> **Dynamic memory (heap)** cannot be measured automatically. +> **⚠️ You must test this PR on a real device** to measure free heap and ensure no runtime memory issues. + +*This analysis runs automatically when components change. Memory usage is measured from {{ config_note }}.* diff --git a/script/templates/ci_memory_impact_component_breakdown.j2 b/script/templates/ci_memory_impact_component_breakdown.j2 new file mode 100644 index 0000000000..a781e5c546 --- /dev/null +++ b/script/templates/ci_memory_impact_component_breakdown.j2 @@ -0,0 +1,15 @@ + +
+📊 Component Memory Breakdown + +| Component | Target Flash | PR Flash | Change | +|-----------|--------------|----------|--------| +{% for comp, target_flash, pr_flash, delta in changed_components[:max_rows] -%} +{% set threshold = component_change_threshold if comp.startswith("[esphome]") else none -%} +| `{{ comp }}` | {{ target_flash|format_bytes }} | {{ pr_flash|format_bytes }} | {{ format_change(target_flash, pr_flash, threshold=threshold) }} | +{% endfor -%} +{% if changed_components|length > max_rows -%} +| ... | ... | ... | *({{ changed_components|length - max_rows }} more components not shown)* | +{% endif -%} + +
diff --git a/script/templates/ci_memory_impact_macros.j2 b/script/templates/ci_memory_impact_macros.j2 new file mode 100644 index 0000000000..9fb346a7c5 --- /dev/null +++ b/script/templates/ci_memory_impact_macros.j2 @@ -0,0 +1,8 @@ +{#- Macro for formatting symbol names in tables -#} +{%- macro format_symbol(symbol, max_length, truncate_length) -%} +{%- if symbol|length <= max_length -%} +`{{ symbol }}` +{%- else -%} +
{{ symbol[:truncate_length] }}...{{ symbol }}
+{%- endif -%} +{%- endmacro -%} diff --git a/script/templates/ci_memory_impact_symbol_changes.j2 b/script/templates/ci_memory_impact_symbol_changes.j2 new file mode 100644 index 0000000000..bd540712f8 --- /dev/null +++ b/script/templates/ci_memory_impact_symbol_changes.j2 @@ -0,0 +1,51 @@ +{%- from 'ci_memory_impact_macros.j2' import format_symbol -%} + +
+🔍 Symbol-Level Changes (click to expand) + +{%- if changed_symbols %} + +### Changed Symbols + +| Symbol | Target Size | PR Size | Change | +|--------|-------------|---------|--------| +{% for symbol, target_size, pr_size, delta in changed_symbols[:max_changed_rows] -%} +| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ target_size|format_bytes }} | {{ pr_size|format_bytes }} | {{ format_change(target_size, pr_size) }} | +{% endfor -%} +{% if changed_symbols|length > max_changed_rows -%} +| ... | ... | ... | *({{ changed_symbols|length - max_changed_rows }} more changed symbols not shown)* | +{% endif -%} + +{%- endif %} +{%- if new_symbols %} + +### New Symbols (top {{ max_new_rows }}) + +| Symbol | Size | +|--------|------| +{% for symbol, size in new_symbols[:max_new_rows] -%} +| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ size|format_bytes }} | +{% endfor -%} +{% if new_symbols|length > max_new_rows -%} +{% set total_new_size = new_symbols|sum(attribute=1) -%} +| *{{ new_symbols|length - max_new_rows }} more new symbols...* | *Total: {{ total_new_size|format_bytes }}* | +{% endif -%} + +{%- endif %} +{%- if removed_symbols %} + +### Removed Symbols (top {{ max_removed_rows }}) + +| Symbol | Size | +|--------|------| +{% for symbol, size in removed_symbols[:max_removed_rows] -%} +| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ size|format_bytes }} | +{% endfor -%} +{% if removed_symbols|length > max_removed_rows -%} +{% set total_removed_size = removed_symbols|sum(attribute=1) -%} +| *{{ removed_symbols|length - max_removed_rows }} more removed symbols...* | *Total: {{ total_removed_size|format_bytes }}* | +{% endif -%} + +{%- endif %} + +
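As background for the Jinja2 pattern these new templates introduce, a minimal, self-contained sketch of the same idea: an Environment with a custom filter registered, rendering a one-line inline template via DictLoader instead of the real files under script/templates. The template text, names, and values here are purely illustrative, not the code from this patch:

from jinja2 import DictLoader, Environment

def format_bytes(n: int) -> str:
    return f"{n:,} bytes"

env = Environment(
    loader=DictLoader({"row.j2": "| `{{ comp }}` | {{ flash | format_bytes }} |"}),
    trim_blocks=True,
    lstrip_blocks=True,
)
env.filters["format_bytes"] = format_bytes  # same registration pattern as the patch

print(env.get_template("row.j2").render(comp="wifi", flash=123456))
# -> | `wifi` | 123,456 bytes |

Keeping the markdown layout in .j2 files rather than in Python f-strings is what lets the comment format evolve without touching the script logic.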
From ba18bb6a4fedb7946c0a462957fdbfe960bb1eb3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 18:18:15 -1000 Subject: [PATCH 076/336] template all the things --- script/ci_memory_impact_comment.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 961c304e40..5a399639f5 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -148,9 +148,11 @@ def prepare_symbol_changes_data( all_symbols = set(target_symbols.keys()) | set(pr_symbols.keys()) # Track changes - changed_symbols = [] - new_symbols = [] - removed_symbols = [] + changed_symbols: list[ + tuple[str, int, int, int] + ] = [] # (symbol, target_size, pr_size, delta) + new_symbols: list[tuple[str, int]] = [] # (symbol, size) + removed_symbols: list[tuple[str, int]] = [] # (symbol, size) for symbol in all_symbols: target_size = target_symbols.get(symbol, 0) @@ -201,7 +203,9 @@ def prepare_component_breakdown_data( all_components = set(target_analysis.keys()) | set(pr_analysis.keys()) # Filter to components that have changed (ignoring noise) - changed_components = [] + changed_components: list[ + tuple[str, int, int, int] + ] = [] # (comp, target_flash, pr_flash, delta) for comp in all_components: target_mem = target_analysis.get(comp, {}) pr_mem = pr_analysis.get(comp, {}) From a078486a878406a6fd85a8d995e9453bf1d52561 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 18:21:28 -1000 Subject: [PATCH 077/336] update test --- tests/script/test_determine_jobs.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/script/test_determine_jobs.py b/tests/script/test_determine_jobs.py index f8557ef6b6..24c77b6ae9 100644 --- a/tests/script/test_determine_jobs.py +++ b/tests/script/test_determine_jobs.py @@ -17,6 +17,9 @@ script_dir = os.path.abspath( ) sys.path.insert(0, script_dir) +# Import helpers module for patching +import helpers # noqa: E402 + spec = importlib.util.spec_from_file_location( "determine_jobs", os.path.join(script_dir, "determine-jobs.py") ) @@ -478,9 +481,10 @@ def test_main_filters_components_without_tests( airthings_dir = tests_dir / "airthings_ble" airthings_dir.mkdir(parents=True) - # Mock root_path to use tmp_path + # Mock root_path to use tmp_path (need to patch both determine_jobs and helpers) with ( patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), patch("sys.argv", ["determine-jobs.py"]), ): # Clear the cache since we're mocking root_path From 7e54803edea0b24f0892129e4a66d39dd44da5b3 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 18:25:41 -1000 Subject: [PATCH 078/336] update test --- esphome/analyze_memory/cli.py | 19 +++---- script/ci_memory_impact_comment.py | 82 ++++++++++++++++++------------ script/ci_memory_impact_extract.py | 30 +++++------ 3 files changed, 75 insertions(+), 56 deletions(-) diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index a2366430dd..5713eac94c 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -371,15 +371,16 @@ def main(): idedata = None for idedata_path in idedata_candidates: - if idedata_path.exists(): - try: - with open(idedata_path, encoding="utf-8") as f: - raw_data = json.load(f) - idedata = IDEData(raw_data) - print(f"Loaded idedata from: {idedata_path}", file=sys.stderr) - break - except (json.JSONDecodeError, OSError) as e: - print(f"Warning: Failed to load idedata: {e}", file=sys.stderr) + if not idedata_path.exists(): + continue + try: + with open(idedata_path, encoding="utf-8") as f: + raw_data = json.load(f) + idedata = IDEData(raw_data) + print(f"Loaded idedata from: {idedata_path}", file=sys.stderr) + break + except (json.JSONDecodeError, OSError) as e: + print(f"Warning: Failed to load idedata: {e}", file=sys.stderr) if not idedata: print( diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 5a399639f5..4e3fbb9086 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -409,6 +409,54 @@ def find_existing_comment(pr_number: str) -> str | None: return None +def update_existing_comment(comment_id: str, comment_body: str) -> None: + """Update an existing comment. + + Args: + comment_id: Comment ID to update + comment_body: New comment body text + + Raises: + subprocess.CalledProcessError: If gh command fails + """ + print(f"DEBUG: Updating existing comment {comment_id}", file=sys.stderr) + result = subprocess.run( + [ + "gh", + "api", + f"/repos/{{owner}}/{{repo}}/issues/comments/{comment_id}", + "-X", + "PATCH", + "-f", + f"body={comment_body}", + ], + check=True, + capture_output=True, + text=True, + ) + print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr) + + +def create_new_comment(pr_number: str, comment_body: str) -> None: + """Create a new PR comment. + + Args: + pr_number: PR number + comment_body: Comment body text + + Raises: + subprocess.CalledProcessError: If gh command fails + """ + print(f"DEBUG: Posting new comment on PR #{pr_number}", file=sys.stderr) + result = subprocess.run( + ["gh", "pr", "comment", pr_number, "--body", comment_body], + check=True, + capture_output=True, + text=True, + ) + print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr) + + def post_or_update_comment(pr_number: str, comment_body: str) -> None: """Post a new comment or update existing one. 
@@ -423,39 +471,9 @@ def post_or_update_comment(pr_number: str, comment_body: str) -> None: existing_comment_id = find_existing_comment(pr_number) if existing_comment_id and existing_comment_id != "None": - # Update existing comment - print( - f"DEBUG: Updating existing comment {existing_comment_id}", - file=sys.stderr, - ) - result = subprocess.run( - [ - "gh", - "api", - f"/repos/{{owner}}/{{repo}}/issues/comments/{existing_comment_id}", - "-X", - "PATCH", - "-f", - f"body={comment_body}", - ], - check=True, - capture_output=True, - text=True, - ) - print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr) + update_existing_comment(existing_comment_id, comment_body) else: - # Post new comment - print( - f"DEBUG: Posting new comment (existing_comment_id={existing_comment_id})", - file=sys.stderr, - ) - result = subprocess.run( - ["gh", "pr", "comment", pr_number, "--body", comment_body], - check=True, - capture_output=True, - text=True, - ) - print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr) + create_new_comment(pr_number, comment_body) print("Comment posted/updated successfully", file=sys.stderr) diff --git a/script/ci_memory_impact_extract.py b/script/ci_memory_impact_extract.py index 17ac788ae3..77d59417e3 100755 --- a/script/ci_memory_impact_extract.py +++ b/script/ci_memory_impact_extract.py @@ -25,6 +25,8 @@ import sys sys.path.insert(0, str(Path(__file__).parent.parent)) # pylint: disable=wrong-import-position +from esphome.analyze_memory import MemoryAnalyzer +from esphome.platformio_api import IDEData from script.ci_helpers import write_github_output # Regex patterns for extracting memory usage from PlatformIO output @@ -85,9 +87,6 @@ def run_detailed_analysis(build_dir: str) -> dict | None: Returns: Dictionary with analysis results or None if analysis fails """ - from esphome.analyze_memory import MemoryAnalyzer - from esphome.platformio_api import IDEData - build_path = Path(build_dir) if not build_path.exists(): print(f"Build directory not found: {build_dir}", file=sys.stderr) @@ -120,18 +119,19 @@ def run_detailed_analysis(build_dir: str) -> dict | None: idedata = None for idedata_path in idedata_candidates: - if idedata_path.exists(): - try: - with open(idedata_path, encoding="utf-8") as f: - raw_data = json.load(f) - idedata = IDEData(raw_data) - print(f"Loaded idedata from: {idedata_path}", file=sys.stderr) - break - except (json.JSONDecodeError, OSError) as e: - print( - f"Warning: Failed to load idedata from {idedata_path}: {e}", - file=sys.stderr, - ) + if not idedata_path.exists(): + continue + try: + with open(idedata_path, encoding="utf-8") as f: + raw_data = json.load(f) + idedata = IDEData(raw_data) + print(f"Loaded idedata from: {idedata_path}", file=sys.stderr) + break + except (json.JSONDecodeError, OSError) as e: + print( + f"Warning: Failed to load idedata from {idedata_path}: {e}", + file=sys.stderr, + ) analyzer = MemoryAnalyzer(elf_path, idedata=idedata) components = analyzer.analyze() From 85e0a4fbf9e966a096d61cfcf086640ee88c6be3 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 18:29:36 -1000 Subject: [PATCH 079/336] update test --- esphome/platformio_api.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/esphome/platformio_api.py b/esphome/platformio_api.py index cc48562b4c..c50bb2acff 100644 --- a/esphome/platformio_api.py +++ b/esphome/platformio_api.py @@ -378,19 +378,17 @@ class IDEData: @property def objdump_path(self) -> str: # replace gcc at end with objdump - - # Windows - if self.cc_path.endswith(".exe"): - return f"{self.cc_path[:-7]}objdump.exe" - - return f"{self.cc_path[:-3]}objdump" + return ( + f"{self.cc_path[:-7]}objdump.exe" + if self.cc_path.endswith(".exe") + else f"{self.cc_path[:-3]}objdump" + ) @property def readelf_path(self) -> str: # replace gcc at end with readelf - - # Windows - if self.cc_path.endswith(".exe"): - return f"{self.cc_path[:-7]}readelf.exe" - - return f"{self.cc_path[:-3]}readelf" + return ( + f"{self.cc_path[:-7]}readelf.exe" + if self.cc_path.endswith(".exe") + else f"{self.cc_path[:-3]}readelf" + ) From 541fb8b27c3cc302923fd41ad6c3f6bdb9b06ea9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 18:32:22 -1000 Subject: [PATCH 080/336] update test --- esphome/analyze_memory/__init__.py | 40 +++++++++++++++++++----------- 1 file changed, 25 insertions(+), 15 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 942caabe70..db16051b8a 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -33,6 +33,21 @@ _GCC_PREFIX_ANNOTATIONS = { "_GLOBAL__sub_D_": "global destructor for", } +# GCC optimization suffix pattern (e.g., $isra$0, $part$1, $constprop$2) +_GCC_OPTIMIZATION_SUFFIX_PATTERN = re.compile(r"(\$(?:isra|part|constprop)\$\d+)") + +# C++ runtime patterns for categorization +_CPP_RUNTIME_PATTERNS = frozenset(["vtable", "typeinfo", "thunk"]) + +# libc printf/scanf family base names (used to detect variants like _printf_r, vfprintf, etc.) 
+_LIBC_PRINTF_SCANF_FAMILY = frozenset(["printf", "fprintf", "sprintf", "scanf"]) + +# Regex pattern for parsing readelf section headers +# Format: [ #] name type addr off size +_READELF_SECTION_PATTERN = re.compile( + r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)" +) + @dataclass class MemorySection: @@ -133,12 +148,7 @@ class MemoryAnalyzer: # Parse section headers for line in result.stdout.splitlines(): # Look for section entries - if not ( - match := re.match( - r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)", - line, - ) - ): + if not (match := _READELF_SECTION_PATTERN.match(line)): continue section_name = match.group(1) @@ -273,14 +283,14 @@ class MemoryAnalyzer: # Check if spi_flash vs spi_driver if "spi_" in symbol_name or "SPI" in symbol_name: - if "spi_flash" in symbol_name: - return "spi_flash" - return "spi_driver" + return "spi_flash" if "spi_flash" in symbol_name else "spi_driver" # libc special printf variants - if symbol_name.startswith("_") and symbol_name[1:].replace("_r", "").replace( - "v", "" - ).replace("s", "") in ["printf", "fprintf", "sprintf", "scanf"]: + if ( + symbol_name.startswith("_") + and symbol_name[1:].replace("_r", "").replace("v", "").replace("s", "") + in _LIBC_PRINTF_SCANF_FAMILY + ): return "libc" # Track uncategorized symbols for analysis @@ -320,7 +330,7 @@ class MemoryAnalyzer: symbols_prefixes: list[str] = [] # Track removed prefixes for symbol in symbols: # Remove GCC optimization markers - stripped = re.sub(r"\$(?:isra|part|constprop)\$\d+", "", symbol) + stripped = _GCC_OPTIMIZATION_SUFFIX_PATTERN.sub("", symbol) # Handle GCC global constructor/initializer prefixes # _GLOBAL__sub_I_ -> extract for demangling @@ -450,7 +460,7 @@ class MemoryAnalyzer: Returns: Demangled name with suffix annotation """ - suffix_match = re.search(r"(\$(?:isra|part|constprop)\$\d+)", original) + suffix_match = _GCC_OPTIMIZATION_SUFFIX_PATTERN.search(original) if suffix_match: return f"{demangled} [{suffix_match.group(1)}]" return demangled @@ -462,7 +472,7 @@ class MemoryAnalyzer: def _categorize_esphome_core_symbol(self, demangled: str) -> str: """Categorize ESPHome core symbols into subcategories.""" # Special patterns that need to be checked separately - if any(pattern in demangled for pattern in ["vtable", "typeinfo", "thunk"]): + if any(pattern in demangled for pattern in _CPP_RUNTIME_PATTERNS): return "C++ Runtime (vtables/RTTI)" if demangled.startswith("std::"): From f9807db08ab7218f5f14570814378fc95aba3ff1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 18:37:24 -1000 Subject: [PATCH 081/336] preen --- esphome/analyze_memory/__init__.py | 21 +++++++++++++-------- esphome/analyze_memory/cli.py | 22 ++++++++++++++++------ 2 files changed, 29 insertions(+), 14 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index db16051b8a..15cadaf859 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -48,6 +48,12 @@ _READELF_SECTION_PATTERN = re.compile( r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)" ) +# Component category prefixes +_COMPONENT_PREFIX_ESPHOME = "[esphome]" +_COMPONENT_PREFIX_EXTERNAL = "[external]" +_COMPONENT_CORE = f"{_COMPONENT_PREFIX_ESPHOME}core" +_COMPONENT_API = f"{_COMPONENT_PREFIX_ESPHOME}api" + @dataclass class MemorySection: @@ -222,7 +228,7 @@ class MemoryAnalyzer: self._uncategorized_symbols.append((symbol_name, demangled, size)) # Track ESPHome core symbols for detailed analysis - if component == "[esphome]core" and size > 0: + if component == _COMPONENT_CORE and size > 0: demangled = self._demangle_symbol(symbol_name) self._esphome_core_symbols.append((symbol_name, demangled, size)) @@ -246,7 +252,7 @@ class MemoryAnalyzer: for component_name in get_esphome_components(): patterns = get_component_class_patterns(component_name) if any(pattern in demangled for pattern in patterns): - return f"[esphome]{component_name}" + return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}" # Check for ESPHome component namespaces match = ESPHOME_COMPONENT_PATTERN.search(demangled) @@ -257,17 +263,17 @@ class MemoryAnalyzer: # Check if this is an actual component in the components directory if component_name in get_esphome_components(): - return f"[esphome]{component_name}" + return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}" # Check if this is a known external component from the config if component_name in self.external_components: - return f"[external]{component_name}" + return f"{_COMPONENT_PREFIX_EXTERNAL}{component_name}" # Everything else in esphome:: namespace is core - return "[esphome]core" + return _COMPONENT_CORE # Check for esphome core namespace (no component namespace) if "esphome::" in demangled: # If no component match found, it's core - return "[esphome]core" + return _COMPONENT_CORE # Check against symbol patterns for component, patterns in SYMBOL_PATTERNS.items(): @@ -460,8 +466,7 @@ class MemoryAnalyzer: Returns: Demangled name with suffix annotation """ - suffix_match = _GCC_OPTIMIZATION_SUFFIX_PATTERN.search(original) - if suffix_match: + if suffix_match := _GCC_OPTIMIZATION_SUFFIX_PATTERN.search(original): return f"{demangled} [{suffix_match.group(1)}]" return demangled diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index 5713eac94c..1695a00c19 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -3,7 +3,13 @@ from collections import defaultdict import sys -from . import MemoryAnalyzer +from . 
import ( + _COMPONENT_API, + _COMPONENT_CORE, + _COMPONENT_PREFIX_ESPHOME, + _COMPONENT_PREFIX_EXTERNAL, + MemoryAnalyzer, +) class MemoryAnalyzerCLI(MemoryAnalyzer): @@ -144,7 +150,9 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): if self._esphome_core_symbols: lines.append("") lines.append("=" * self.TABLE_WIDTH) - lines.append("[esphome]core Detailed Analysis".center(self.TABLE_WIDTH)) + lines.append( + f"{_COMPONENT_CORE} Detailed Analysis".center(self.TABLE_WIDTH) + ) lines.append("=" * self.TABLE_WIDTH) lines.append("") @@ -185,7 +193,7 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): # Top 15 largest core symbols lines.append("") - lines.append("Top 15 Largest [esphome]core Symbols:") + lines.append(f"Top 15 Largest {_COMPONENT_CORE} Symbols:") sorted_core_symbols = sorted( self._esphome_core_symbols, key=lambda x: x[2], reverse=True ) @@ -199,10 +207,12 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): esphome_components = [ (name, mem) for name, mem in components - if name.startswith("[esphome]") and name != "[esphome]core" + if name.startswith(_COMPONENT_PREFIX_ESPHOME) and name != _COMPONENT_CORE ] external_components = [ - (name, mem) for name, mem in components if name.startswith("[external]") + (name, mem) + for name, mem in components + if name.startswith(_COMPONENT_PREFIX_EXTERNAL) ] top_esphome_components = sorted( @@ -217,7 +227,7 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): # Check if API component exists and ensure it's included api_component = None for name, mem in components: - if name == "[esphome]api": + if name == _COMPONENT_API: api_component = (name, mem) break From 4f4da1de22acb050c0641a98828f0f6e231c2487 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 18:41:12 -1000 Subject: [PATCH 082/336] preen --- esphome/analyze_memory/__init__.py | 17 +++++++++++------ esphome/analyze_memory/helpers.py | 13 +++++++++---- esphome/platformio_api.py | 14 ++++++++------ 3 files changed, 28 insertions(+), 16 deletions(-) diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py index 15cadaf859..71e86e3788 100644 --- a/esphome/analyze_memory/__init__.py +++ b/esphome/analyze_memory/__init__.py @@ -54,15 +54,20 @@ _COMPONENT_PREFIX_EXTERNAL = "[external]" _COMPONENT_CORE = f"{_COMPONENT_PREFIX_ESPHOME}core" _COMPONENT_API = f"{_COMPONENT_PREFIX_ESPHOME}api" +# C++ namespace prefixes +_NAMESPACE_ESPHOME = "esphome::" +_NAMESPACE_STD = "std::" + +# Type alias for symbol information: (symbol_name, size, component) +SymbolInfoType = tuple[str, int, str] + @dataclass class MemorySection: """Represents a memory section with its symbols.""" name: str - symbols: list[tuple[str, int, str]] = field( - default_factory=list - ) # (symbol_name, size, component) + symbols: list[SymbolInfoType] = field(default_factory=list) total_size: int = 0 @@ -246,7 +251,7 @@ class MemoryAnalyzer: # Check for special component classes first (before namespace pattern) # This handles cases like esphome::ESPHomeOTAComponent which should map to ota - if "esphome::" in demangled: + if _NAMESPACE_ESPHOME in demangled: # Check for special component classes that include component name in the class # For example: esphome::ESPHomeOTAComponent -> ota component for component_name in get_esphome_components(): @@ -271,7 +276,7 @@ class MemoryAnalyzer: return _COMPONENT_CORE # Check for esphome core namespace (no component namespace) - if "esphome::" in demangled: + if _NAMESPACE_ESPHOME in demangled: # If no component match found, it's core return _COMPONENT_CORE @@ -480,7 +485,7 @@ 
class MemoryAnalyzer: if any(pattern in demangled for pattern in _CPP_RUNTIME_PATTERNS): return "C++ Runtime (vtables/RTTI)" - if demangled.startswith("std::"): + if demangled.startswith(_NAMESPACE_STD): return "C++ STL" # Check against patterns from const.py diff --git a/esphome/analyze_memory/helpers.py b/esphome/analyze_memory/helpers.py index 1b5a1c67c2..cb503b37c5 100644 --- a/esphome/analyze_memory/helpers.py +++ b/esphome/analyze_memory/helpers.py @@ -5,6 +5,11 @@ from pathlib import Path from .const import SECTION_MAPPING +# Import namespace constant from parent module +# Note: This would create a circular import if done at module level, +# so we'll define it locally here as well +_NAMESPACE_ESPHOME = "esphome::" + # Get the list of actual ESPHome components by scanning the components directory @cache @@ -40,10 +45,10 @@ def get_component_class_patterns(component_name: str) -> list[str]: component_upper = component_name.upper() component_camel = component_name.replace("_", "").title() return [ - f"esphome::{component_upper}Component", # e.g., esphome::OTAComponent - f"esphome::ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent - f"esphome::{component_camel}Component", # e.g., esphome::OtaComponent - f"esphome::ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent + f"{_NAMESPACE_ESPHOME}{component_upper}Component", # e.g., esphome::OTAComponent + f"{_NAMESPACE_ESPHOME}ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent + f"{_NAMESPACE_ESPHOME}{component_camel}Component", # e.g., esphome::OtaComponent + f"{_NAMESPACE_ESPHOME}ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent ] diff --git a/esphome/platformio_api.py b/esphome/platformio_api.py index c50bb2acff..d59523a74a 100644 --- a/esphome/platformio_api.py +++ b/esphome/platformio_api.py @@ -378,17 +378,19 @@ class IDEData: @property def objdump_path(self) -> str: # replace gcc at end with objdump + path = self.cc_path return ( - f"{self.cc_path[:-7]}objdump.exe" - if self.cc_path.endswith(".exe") - else f"{self.cc_path[:-3]}objdump" + f"{path[:-7]}objdump.exe" + if path.endswith(".exe") + else f"{path[:-3]}objdump" ) @property def readelf_path(self) -> str: # replace gcc at end with readelf + path = self.cc_path return ( - f"{self.cc_path[:-7]}readelf.exe" - if self.cc_path.endswith(".exe") - else f"{self.cc_path[:-3]}readelf" + f"{path[:-7]}readelf.exe" + if path.endswith(".exe") + else f"{path[:-3]}readelf" ) From 7f2d8a2c118da393b7758dee2ae215ddd50985fc Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 18:46:41 -1000 Subject: [PATCH 083/336] whitespace --- .../ci_memory_impact_comment_template.j2 | 6 +- .../ci_memory_impact_symbol_changes.j2 | 12 +- tests/script/test_determine_jobs.py | 182 ++++++++++++++++++ 3 files changed, 191 insertions(+), 9 deletions(-) diff --git a/script/templates/ci_memory_impact_comment_template.j2 b/script/templates/ci_memory_impact_comment_template.j2 index 4c8d7f4865..9fbf78e99f 100644 --- a/script/templates/ci_memory_impact_comment_template.j2 +++ b/script/templates/ci_memory_impact_comment_template.j2 @@ -10,10 +10,10 @@ | **Flash** | {{ target_flash }} | {{ pr_flash }} | {{ flash_change }} | {% if component_breakdown %} {{ component_breakdown }} -{%- endif %} -{%- if symbol_changes %} +{% endif %} +{% if symbol_changes %} {{ symbol_changes }} -{%- endif %} +{% endif %} {%- if target_cache_hit %} > ⚡ Target branch analysis was loaded from cache (build skipped for faster CI). 
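For context on the whitespace-control change in the template above (illustrative only, not code from this series): Jinja2's `{%-` marker strips the whitespace adjacent to the tag, while the plain `{%` form keeps it, which decides whether blank lines survive around the rendered sections of the PR comment.

# Minimal Jinja2 demo of {%- vs {% whitespace control.
from jinja2 import Template

ctx = {"symbol_changes": "### Changed Symbols"}

trimmed = Template("{%- if symbol_changes %}\n{{ symbol_changes }}\n{%- endif %}")
plain = Template("{% if symbol_changes %}\n{{ symbol_changes }}\n{% endif %}")

print(repr(trimmed.render(ctx)))  # '\n### Changed Symbols'   (newline before the closing tag stripped)
print(repr(plain.render(ctx)))    # '\n### Changed Symbols\n' (surrounding newlines kept)
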
diff --git a/script/templates/ci_memory_impact_symbol_changes.j2 b/script/templates/ci_memory_impact_symbol_changes.j2 index bd540712f8..60f2f50e48 100644 --- a/script/templates/ci_memory_impact_symbol_changes.j2 +++ b/script/templates/ci_memory_impact_symbol_changes.j2 @@ -3,7 +3,7 @@
🔍 Symbol-Level Changes (click to expand) -{%- if changed_symbols %} +{% if changed_symbols %} ### Changed Symbols @@ -16,8 +16,8 @@ | ... | ... | ... | *({{ changed_symbols|length - max_changed_rows }} more changed symbols not shown)* | {% endif -%} -{%- endif %} -{%- if new_symbols %} +{% endif %} +{% if new_symbols %} ### New Symbols (top {{ max_new_rows }}) @@ -31,8 +31,8 @@ | *{{ new_symbols|length - max_new_rows }} more new symbols...* | *Total: {{ total_new_size|format_bytes }}* | {% endif -%} -{%- endif %} -{%- if removed_symbols %} +{% endif %} +{% if removed_symbols %} ### Removed Symbols (top {{ max_removed_rows }}) @@ -46,6 +46,6 @@ | *{{ removed_symbols|length - max_removed_rows }} more removed symbols...* | *Total: {{ total_removed_size|format_bytes }}* | {% endif -%} -{%- endif %} +{% endif %}
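Purely as a hedged sketch of the behaviour the tests added below assert (this is not the repository's detect_memory_impact_config implementation; the helper names and the preference list are assumptions): each changed component's test.&lt;platform&gt;.yaml files define its supported platforms, and the analysis wants one platform shared by all changed components, with a fallback when no common platform exists.

# Hypothetical sketch of common-platform selection, not the real implementation.
from pathlib import Path

PLATFORM_PREFERENCE = ["esp32-idf", "esp8266-ard"]  # assumed ordering for this sketch


def platforms_for(tests_dir: Path, component: str) -> set[str]:
    """Collect platforms from a component's test.<platform>.yaml files."""
    return {
        p.name.removeprefix("test.").removesuffix(".yaml")
        for p in (tests_dir / component).glob("test.*.yaml")
    }


def pick_platform(tests_dir: Path, components: list[str]) -> str | None:
    sets = [platforms_for(tests_dir, c) for c in components]
    if not sets or not all(sets):
        return None  # a component without tests means nothing to build
    common = set.intersection(*sets)
    # No common platform: fall back to any supported one, preferring the list above.
    candidates = common or set.union(*sets)
    for preferred in PLATFORM_PREFERENCE:
        if preferred in candidates:
            return preferred
    return sorted(candidates)[0]
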
diff --git a/tests/script/test_determine_jobs.py b/tests/script/test_determine_jobs.py index 24c77b6ae9..b479fc03c5 100644 --- a/tests/script/test_determine_jobs.py +++ b/tests/script/test_determine_jobs.py @@ -504,3 +504,185 @@ def test_main_filters_components_without_tests( # memory_impact should be present assert "memory_impact" in output assert output["memory_impact"]["should_run"] == "false" + + +# Tests for detect_memory_impact_config function + + +def test_detect_memory_impact_config_with_common_platform(tmp_path: Path) -> None: + """Test memory impact detection when components share a common platform.""" + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # wifi component with esp32-idf test + wifi_dir = tests_dir / "wifi" + wifi_dir.mkdir(parents=True) + (wifi_dir / "test.esp32-idf.yaml").write_text("test: wifi") + + # api component with esp32-idf test + api_dir = tests_dir / "api" + api_dir.mkdir(parents=True) + (api_dir / "test.esp32-idf.yaml").write_text("test: api") + + # Mock changed_files to return wifi and api component changes + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/wifi/wifi.cpp", + "esphome/components/api/api.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + assert result["should_run"] == "true" + assert set(result["components"]) == {"wifi", "api"} + assert result["platform"] == "esp32-idf" # Common platform + assert result["use_merged_config"] == "true" + + +def test_detect_memory_impact_config_core_only_changes(tmp_path: Path) -> None: + """Test memory impact detection with core-only changes (no component changes).""" + # Create test directory structure with fallback component + tests_dir = tmp_path / "tests" / "components" + + # api component (fallback component) with esp32-idf test + api_dir = tests_dir / "api" + api_dir.mkdir(parents=True) + (api_dir / "test.esp32-idf.yaml").write_text("test: api") + + # Mock changed_files to return only core files (no component files) + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/core/application.cpp", + "esphome/core/component.h", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + assert result["should_run"] == "true" + assert result["components"] == ["api"] # Fallback component + assert result["platform"] == "esp32-idf" # Fallback platform + assert result["use_merged_config"] == "true" + + +def test_detect_memory_impact_config_no_common_platform(tmp_path: Path) -> None: + """Test memory impact detection when components have no common platform.""" + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # wifi component only has esp32-idf test + wifi_dir = tests_dir / "wifi" + wifi_dir.mkdir(parents=True) + (wifi_dir / "test.esp32-idf.yaml").write_text("test: wifi") + + # logger component only has esp8266-ard test + logger_dir = tests_dir / "logger" + logger_dir.mkdir(parents=True) + (logger_dir / "test.esp8266-ard.yaml").write_text("test: logger") + + # Mock changed_files to return both components + with ( + 
patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/wifi/wifi.cpp", + "esphome/components/logger/logger.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + # Should pick the most frequently supported platform + assert result["should_run"] == "true" + assert set(result["components"]) == {"wifi", "logger"} + # When no common platform, picks most commonly supported + # esp8266-ard is preferred over esp32-idf in the preference list + assert result["platform"] in ["esp32-idf", "esp8266-ard"] + assert result["use_merged_config"] == "true" + + +def test_detect_memory_impact_config_no_changes(tmp_path: Path) -> None: + """Test memory impact detection when no files changed.""" + # Mock changed_files to return empty list + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + assert result["should_run"] == "false" + + +def test_detect_memory_impact_config_no_components_with_tests(tmp_path: Path) -> None: + """Test memory impact detection when changed components have no tests.""" + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # Create component directory but no test files + custom_component_dir = tests_dir / "my_custom_component" + custom_component_dir.mkdir(parents=True) + + # Mock changed_files to return component without tests + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/my_custom_component/component.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + assert result["should_run"] == "false" + + +def test_detect_memory_impact_config_skips_base_bus_components(tmp_path: Path) -> None: + """Test that base bus components (i2c, spi, uart) are skipped.""" + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # i2c component (should be skipped as it's a base bus component) + i2c_dir = tests_dir / "i2c" + i2c_dir.mkdir(parents=True) + (i2c_dir / "test.esp32-idf.yaml").write_text("test: i2c") + + # wifi component (should not be skipped) + wifi_dir = tests_dir / "wifi" + wifi_dir.mkdir(parents=True) + (wifi_dir / "test.esp32-idf.yaml").write_text("test: wifi") + + # Mock changed_files to return both i2c and wifi + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/i2c/i2c.cpp", + "esphome/components/wifi/wifi.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + # Should only include wifi, not i2c + assert result["should_run"] == "true" + assert result["components"] == ["wifi"] + assert "i2c" not in result["components"] From 
e70cb098ae25af4ea59a5b2a8d792d20212c9d50 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 18:50:07 -1000 Subject: [PATCH 084/336] whitespace --- tests/unit_tests/test_platformio_api.py | 36 +++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/tests/unit_tests/test_platformio_api.py b/tests/unit_tests/test_platformio_api.py index 07948cc6ad..13ef3516e4 100644 --- a/tests/unit_tests/test_platformio_api.py +++ b/tests/unit_tests/test_platformio_api.py @@ -387,6 +387,42 @@ def test_idedata_addr2line_path_unix(setup_core: Path) -> None: assert result == "/usr/bin/addr2line" +def test_idedata_objdump_path_windows(setup_core: Path) -> None: + """Test IDEData.objdump_path on Windows.""" + raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "C:\\tools\\gcc.exe"} + idedata = platformio_api.IDEData(raw_data) + + result = idedata.objdump_path + assert result == "C:\\tools\\objdump.exe" + + +def test_idedata_objdump_path_unix(setup_core: Path) -> None: + """Test IDEData.objdump_path on Unix.""" + raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "/usr/bin/gcc"} + idedata = platformio_api.IDEData(raw_data) + + result = idedata.objdump_path + assert result == "/usr/bin/objdump" + + +def test_idedata_readelf_path_windows(setup_core: Path) -> None: + """Test IDEData.readelf_path on Windows.""" + raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "C:\\tools\\gcc.exe"} + idedata = platformio_api.IDEData(raw_data) + + result = idedata.readelf_path + assert result == "C:\\tools\\readelf.exe" + + +def test_idedata_readelf_path_unix(setup_core: Path) -> None: + """Test IDEData.readelf_path on Unix.""" + raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "/usr/bin/gcc"} + idedata = platformio_api.IDEData(raw_data) + + result = idedata.readelf_path + assert result == "/usr/bin/readelf" + + def test_patch_structhash(setup_core: Path) -> None: """Test patch_structhash monkey patches platformio functions.""" # Create simple namespace objects to act as modules From b4ae85cf0fd56c785fd94633f65e2ca7fdbfbc3a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 17 Oct 2025 21:03:51 -1000 Subject: [PATCH 085/336] cleanup sorting --- esphome/components/sensor/filter.cpp | 46 ++++++++++++++++++---------- esphome/components/sensor/filter.h | 8 ++--- 2 files changed, 33 insertions(+), 21 deletions(-) diff --git a/esphome/components/sensor/filter.cpp b/esphome/components/sensor/filter.cpp index 1cc744e3b5..1eb0b84964 100644 --- a/esphome/components/sensor/filter.cpp +++ b/esphome/components/sensor/filter.cpp @@ -65,32 +65,41 @@ optional SlidingWindowFilter::new_value(float value) { } // SortedWindowFilter -FixedVector SortedWindowFilter::get_sorted_values_() { +FixedVector SortedWindowFilter::get_window_values_() { // Copy window without NaN values using FixedVector (no heap allocation) - FixedVector sorted_values; - sorted_values.init(this->window_count_); + // Returns unsorted values - caller will use std::nth_element for partial sorting as needed + FixedVector values; + values.init(this->window_count_); for (size_t i = 0; i < this->window_count_; i++) { float v = this->window_[i]; if (!std::isnan(v)) { - sorted_values.push_back(v); + values.push_back(v); } } - std::sort(sorted_values.begin(), sorted_values.end()); - return sorted_values; + return values; } // MedianFilter float MedianFilter::compute_result() { - FixedVector sorted_values = this->get_sorted_values_(); - if (sorted_values.empty()) + FixedVector values = this->get_window_values_(); + if (values.empty()) return NAN; - size_t size = sorted_values.size(); + size_t size = values.size(); + size_t mid = size / 2; + if (size % 2) { - return sorted_values[size / 2]; - } else { - return (sorted_values[size / 2] + sorted_values[(size / 2) - 1]) / 2.0f; + // Odd number of elements - use nth_element to find middle element + std::nth_element(values.begin(), values.begin() + mid, values.end()); + return values[mid]; } + // Even number of elements - need both middle elements + // Use nth_element to find upper middle element + std::nth_element(values.begin(), values.begin() + mid, values.end()); + float upper = values[mid]; + // Find the maximum of the lower half (which is now everything before mid) + float lower = *std::max_element(values.begin(), values.begin() + mid); + return (lower + upper) / 2.0f; } // SkipInitialFilter @@ -111,13 +120,16 @@ QuantileFilter::QuantileFilter(size_t window_size, size_t send_every, size_t sen : SortedWindowFilter(window_size, send_every, send_first_at), quantile_(quantile) {} float QuantileFilter::compute_result() { - FixedVector sorted_values = this->get_sorted_values_(); - if (sorted_values.empty()) + FixedVector values = this->get_window_values_(); + if (values.empty()) return NAN; - size_t position = ceilf(sorted_values.size() * this->quantile_) - 1; - ESP_LOGVV(TAG, "QuantileFilter(%p)::position: %zu/%zu", this, position + 1, sorted_values.size()); - return sorted_values[position]; + size_t position = ceilf(values.size() * this->quantile_) - 1; + ESP_LOGVV(TAG, "QuantileFilter(%p)::position: %zu/%zu", this, position + 1, values.size()); + + // Use nth_element to find the quantile element (O(n) instead of O(n log n)) + std::nth_element(values.begin(), values.begin() + position, values.end()); + return values[position]; } // MinFilter diff --git a/esphome/components/sensor/filter.h b/esphome/components/sensor/filter.h index d99cd79f05..57bb06b517 100644 --- a/esphome/components/sensor/filter.h +++ b/esphome/components/sensor/filter.h @@ -95,17 +95,17 @@ class MinMaxFilter : public SlidingWindowFilter { /** Base class for filters that need a 
sorted window (Median, Quantile). * - * Extends SlidingWindowFilter to provide a helper that creates a sorted copy - * of non-NaN values from the window. + * Extends SlidingWindowFilter to provide a helper that filters out NaN values. + * Derived classes use std::nth_element for efficient partial sorting. */ class SortedWindowFilter : public SlidingWindowFilter { public: using SlidingWindowFilter::SlidingWindowFilter; protected: - /// Helper to get sorted non-NaN values from the window + /// Helper to get non-NaN values from the window (not sorted - caller will use nth_element) /// Returns empty FixedVector if all values are NaN - FixedVector get_sorted_values_(); + FixedVector get_window_values_(); }; /** Simple quantile filter. From 6a96e0ee9073fbef36139818adc434833de94010 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 09:38:37 -1000 Subject: [PATCH 086/336] [light] Use bitmask instead of std::set for color modes --- esphome/components/api/api.proto | 2 +- esphome/components/api/api_connection.cpp | 6 +- esphome/components/api/api_pb2.cpp | 6 +- esphome/components/api/api_pb2.h | 2 +- esphome/components/api/api_pb2_dump.cpp | 2 +- esphome/components/light/color_mode.h | 127 ++++++++++++++++++++++ esphome/components/light/light_call.cpp | 8 +- esphome/components/light/light_call.h | 3 +- esphome/components/light/light_traits.h | 24 ++-- 9 files changed, 151 insertions(+), 29 deletions(-) diff --git a/esphome/components/api/api.proto b/esphome/components/api/api.proto index 753adc3592..4c1de4c4f5 100644 --- a/esphome/components/api/api.proto +++ b/esphome/components/api/api.proto @@ -506,7 +506,7 @@ message ListEntitiesLightResponse { string name = 3; reserved 4; // Deprecated: was string unique_id - repeated ColorMode supported_color_modes = 12 [(container_pointer) = "std::set"]; + repeated ColorMode supported_color_modes = 12 [(fixed_vector) = true]; // next four supports_* are for legacy clients, newer clients should use color modes // Deprecated in API version 1.6 bool legacy_supports_brightness = 5 [deprecated=true]; diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index 1f3456a205..32b0f0d953 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -477,7 +477,11 @@ uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *c auto *light = static_cast(entity); ListEntitiesLightResponse msg; auto traits = light->get_traits(); - msg.supported_color_modes = &traits.get_supported_color_modes_for_api_(); + const auto &color_modes_mask = traits.get_supported_color_modes(); + msg.supported_color_modes.init(color_modes_mask.size()); + for (auto mode : color_modes_mask) { + msg.supported_color_modes.push_back(static_cast(mode)); + } if (traits.supports_color_capability(light::ColorCapability::COLOR_TEMPERATURE) || traits.supports_color_capability(light::ColorCapability::COLD_WARM_WHITE)) { msg.min_mireds = traits.get_min_mireds(); diff --git a/esphome/components/api/api_pb2.cpp b/esphome/components/api/api_pb2.cpp index 37bcf5d8a0..6bc434b658 100644 --- a/esphome/components/api/api_pb2.cpp +++ b/esphome/components/api/api_pb2.cpp @@ -471,7 +471,7 @@ void ListEntitiesLightResponse::encode(ProtoWriteBuffer buffer) const { buffer.encode_string(1, this->object_id_ref_); buffer.encode_fixed32(2, this->key); buffer.encode_string(3, this->name_ref_); - for (const auto &it : *this->supported_color_modes) { + for (auto &it : this->supported_color_modes) { 
buffer.encode_uint32(12, static_cast(it), true); } buffer.encode_float(9, this->min_mireds); @@ -492,8 +492,8 @@ void ListEntitiesLightResponse::calculate_size(ProtoSize &size) const { size.add_length(1, this->object_id_ref_.size()); size.add_fixed32(1, this->key); size.add_length(1, this->name_ref_.size()); - if (!this->supported_color_modes->empty()) { - for (const auto &it : *this->supported_color_modes) { + if (!this->supported_color_modes.empty()) { + for (const auto &it : this->supported_color_modes) { size.add_uint32_force(1, static_cast(it)); } } diff --git a/esphome/components/api/api_pb2.h b/esphome/components/api/api_pb2.h index 5603204801..528b8fc108 100644 --- a/esphome/components/api/api_pb2.h +++ b/esphome/components/api/api_pb2.h @@ -790,7 +790,7 @@ class ListEntitiesLightResponse final : public InfoResponseProtoMessage { #ifdef HAS_PROTO_MESSAGE_DUMP const char *message_name() const override { return "list_entities_light_response"; } #endif - const std::set *supported_color_modes{}; + FixedVector supported_color_modes{}; float min_mireds{0.0f}; float max_mireds{0.0f}; std::vector effects{}; diff --git a/esphome/components/api/api_pb2_dump.cpp b/esphome/components/api/api_pb2_dump.cpp index e803125f53..cda68ceee8 100644 --- a/esphome/components/api/api_pb2_dump.cpp +++ b/esphome/components/api/api_pb2_dump.cpp @@ -913,7 +913,7 @@ void ListEntitiesLightResponse::dump_to(std::string &out) const { dump_field(out, "object_id", this->object_id_ref_); dump_field(out, "key", this->key); dump_field(out, "name", this->name_ref_); - for (const auto &it : *this->supported_color_modes) { + for (const auto &it : this->supported_color_modes) { dump_field(out, "supported_color_modes", static_cast(it), 4); } dump_field(out, "min_mireds", this->min_mireds); diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index e524763c9f..d58ab73fdf 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -104,5 +104,132 @@ constexpr ColorModeHelper operator|(ColorModeHelper lhs, ColorMode rhs) { return static_cast(static_cast(lhs) | static_cast(rhs)); } +/// Bitmask for storing a set of ColorMode values efficiently. +/// Replaces std::set to eliminate red-black tree overhead (~586 bytes). 
+class ColorModeMask { + public: + constexpr ColorModeMask() = default; + + /// Support initializer list syntax: {ColorMode::RGB, ColorMode::WHITE} + constexpr ColorModeMask(std::initializer_list modes) { + for (auto mode : modes) { + this->add(mode); + } + } + + constexpr void add(ColorMode mode) { this->mask_ |= (1 << mode_to_bit(mode)); } + + constexpr bool contains(ColorMode mode) const { return (this->mask_ & (1 << mode_to_bit(mode))) != 0; } + + constexpr size_t size() const { + // Count set bits + uint16_t n = this->mask_; + size_t count = 0; + while (n) { + count += n & 1; + n >>= 1; + } + return count; + } + + /// Iterator support for API encoding + class Iterator { + public: + using iterator_category = std::forward_iterator_tag; + using value_type = ColorMode; + using difference_type = std::ptrdiff_t; + using pointer = const ColorMode *; + using reference = ColorMode; + + constexpr Iterator(uint16_t mask, int bit) : mask_(mask), bit_(bit) { advance_to_next_set_bit(); } + + constexpr ColorMode operator*() const { return bit_to_mode(bit_); } + + constexpr Iterator &operator++() { + ++bit_; + advance_to_next_set_bit(); + return *this; + } + + constexpr bool operator==(const Iterator &other) const { return bit_ == other.bit_; } + + constexpr bool operator!=(const Iterator &other) const { return !(*this == other); } + + private: + constexpr void advance_to_next_set_bit() { + while (bit_ < 16 && !(mask_ & (1 << bit_))) { + ++bit_; + } + } + + uint16_t mask_; + int bit_; + }; + + constexpr Iterator begin() const { return Iterator(mask_, 0); } + constexpr Iterator end() const { return Iterator(mask_, 16); } + + private: + uint16_t mask_{0}; + + /// Map ColorMode enum values to bit positions (0-9) + static constexpr int mode_to_bit(ColorMode mode) { + // Using switch instead of lookup table to avoid RAM usage on ESP8266 + // The compiler optimizes this efficiently + switch (mode) { + case ColorMode::UNKNOWN: + return 0; + case ColorMode::ON_OFF: + return 1; + case ColorMode::BRIGHTNESS: + return 2; + case ColorMode::WHITE: + return 3; + case ColorMode::COLOR_TEMPERATURE: + return 4; + case ColorMode::COLD_WARM_WHITE: + return 5; + case ColorMode::RGB: + return 6; + case ColorMode::RGB_WHITE: + return 7; + case ColorMode::RGB_COLOR_TEMPERATURE: + return 8; + case ColorMode::RGB_COLD_WARM_WHITE: + return 9; + default: + return 0; + } + } + + static constexpr ColorMode bit_to_mode(int bit) { + // Using switch instead of lookup table to avoid RAM usage on ESP8266 + switch (bit) { + case 0: + return ColorMode::UNKNOWN; + case 1: + return ColorMode::ON_OFF; + case 2: + return ColorMode::BRIGHTNESS; + case 3: + return ColorMode::WHITE; + case 4: + return ColorMode::COLOR_TEMPERATURE; + case 5: + return ColorMode::COLD_WARM_WHITE; + case 6: + return ColorMode::RGB; + case 7: + return ColorMode::RGB_WHITE; + case 8: + return ColorMode::RGB_COLOR_TEMPERATURE; + case 9: + return ColorMode::RGB_COLD_WARM_WHITE; + default: + return ColorMode::UNKNOWN; + } + } +}; + } // namespace light } // namespace esphome diff --git a/esphome/components/light/light_call.cpp b/esphome/components/light/light_call.cpp index 915b8fdf89..3e4e449614 100644 --- a/esphome/components/light/light_call.cpp +++ b/esphome/components/light/light_call.cpp @@ -425,10 +425,10 @@ ColorMode LightCall::compute_color_mode_() { // If no color mode is specified, we try to guess the color mode. 
This is needed for backward compatibility to // pre-colormode clients and automations, but also for the MQTT API, where HA doesn't let us know which color mode // was used for some reason. - std::set suitable_modes = this->get_suitable_color_modes_(); + ColorModeMask suitable_modes = this->get_suitable_color_modes_(); // Don't change if the current mode is suitable. - if (suitable_modes.count(current_mode) > 0) { + if (suitable_modes.contains(current_mode)) { ESP_LOGI(TAG, "'%s': color mode not specified; retaining %s", this->parent_->get_name().c_str(), LOG_STR_ARG(color_mode_to_human(current_mode))); return current_mode; @@ -436,7 +436,7 @@ ColorMode LightCall::compute_color_mode_() { // Use the preferred suitable mode. for (auto mode : suitable_modes) { - if (supported_modes.count(mode) == 0) + if (!supported_modes.contains(mode)) continue; ESP_LOGI(TAG, "'%s': color mode not specified; using %s", this->parent_->get_name().c_str(), @@ -451,7 +451,7 @@ ColorMode LightCall::compute_color_mode_() { LOG_STR_ARG(color_mode_to_human(color_mode))); return color_mode; } -std::set LightCall::get_suitable_color_modes_() { +ColorModeMask LightCall::get_suitable_color_modes_() { bool has_white = this->has_white() && this->white_ > 0.0f; bool has_ct = this->has_color_temperature(); bool has_cwww = diff --git a/esphome/components/light/light_call.h b/esphome/components/light/light_call.h index d3a526b136..e87ccd3efd 100644 --- a/esphome/components/light/light_call.h +++ b/esphome/components/light/light_call.h @@ -1,7 +1,6 @@ #pragma once #include "light_color_values.h" -#include namespace esphome { @@ -187,7 +186,7 @@ class LightCall { //// Compute the color mode that should be used for this call. ColorMode compute_color_mode_(); /// Get potential color modes for this light call. - std::set get_suitable_color_modes_(); + ColorModeMask get_suitable_color_modes_(); /// Some color modes also can be set using non-native parameters, transform those calls. 
void transform_parameters_(); diff --git a/esphome/components/light/light_traits.h b/esphome/components/light/light_traits.h index a45301d148..94f1301694 100644 --- a/esphome/components/light/light_traits.h +++ b/esphome/components/light/light_traits.h @@ -2,7 +2,6 @@ #include "esphome/core/helpers.h" #include "color_mode.h" -#include namespace esphome { @@ -19,12 +18,15 @@ class LightTraits { public: LightTraits() = default; - const std::set &get_supported_color_modes() const { return this->supported_color_modes_; } - void set_supported_color_modes(std::set supported_color_modes) { - this->supported_color_modes_ = std::move(supported_color_modes); + const ColorModeMask &get_supported_color_modes() const { return this->supported_color_modes_; } + void set_supported_color_modes(ColorModeMask supported_color_modes) { + this->supported_color_modes_ = supported_color_modes; + } + void set_supported_color_modes(std::initializer_list modes) { + this->supported_color_modes_ = ColorModeMask(modes); } - bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.count(color_mode); } + bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.contains(color_mode); } bool supports_color_capability(ColorCapability color_capability) const { for (auto mode : this->supported_color_modes_) { if (mode & color_capability) @@ -59,17 +61,7 @@ class LightTraits { void set_max_mireds(float max_mireds) { this->max_mireds_ = max_mireds; } protected: -#ifdef USE_API - // The API connection is a friend class to access internal methods - friend class api::APIConnection; - // This method returns a reference to the internal color modes set. - // It is used by the API to avoid copying data when encoding messages. - // Warning: Do not use this method outside of the API connection code. - // It returns a reference to internal data that can be invalidated. - const std::set &get_supported_color_modes_for_api_() const { return this->supported_color_modes_; } -#endif - - std::set supported_color_modes_{}; + ColorModeMask supported_color_modes_{}; float min_mireds_{0}; float max_mireds_{0}; }; From c76e386a79d16a2935f69c10fe352d2fb9fd07e9 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 09:59:24 -1000 Subject: [PATCH 087/336] no vector --- esphome/components/api/api.proto | 2 +- esphome/components/api/api_options.proto | 9 +++++ esphome/components/api/api_pb2.cpp | 14 +++++--- esphome/components/api/api_pb2.h | 2 +- esphome/components/api/api_pb2_dump.cpp | 6 ++-- script/api_protobuf/api_protobuf.py | 42 ++++++++++++++++++++++++ 6 files changed, 65 insertions(+), 10 deletions(-) diff --git a/esphome/components/api/api.proto b/esphome/components/api/api.proto index 4c1de4c4f5..c64fc038d6 100644 --- a/esphome/components/api/api.proto +++ b/esphome/components/api/api.proto @@ -506,7 +506,7 @@ message ListEntitiesLightResponse { string name = 3; reserved 4; // Deprecated: was string unique_id - repeated ColorMode supported_color_modes = 12 [(fixed_vector) = true]; + repeated ColorMode supported_color_modes = 12 [(enum_as_bitmask) = true]; // next four supports_* are for legacy clients, newer clients should use color modes // Deprecated in API version 1.6 bool legacy_supports_brightness = 5 [deprecated=true]; diff --git a/esphome/components/api/api_options.proto b/esphome/components/api/api_options.proto index ead8ac0bbc..450b5e83de 100644 --- a/esphome/components/api/api_options.proto +++ b/esphome/components/api/api_options.proto @@ -70,4 +70,13 @@ extend google.protobuf.FieldOptions { // init(size) before adding elements. This eliminates std::vector template overhead // and is ideal when the exact size is known before populating the array. optional bool fixed_vector = 50013 [default=false]; + + // enum_as_bitmask: Encode repeated enum fields as a uint32_t bitmask + // When set on a repeated enum field, the field will be stored as a single uint32_t + // where each bit represents whether that enum value is present. This is ideal for + // enums with ≤32 values and eliminates all vector template instantiation overhead. + // The enum values should be sequential starting from 0. + // Encoding: bit N set means enum value N is present in the set. 
+ // Example: {ColorMode::RGB, ColorMode::WHITE} → bitmask with bits 5 and 6 set + optional bool enum_as_bitmask = 50014 [default=false]; } diff --git a/esphome/components/api/api_pb2.cpp b/esphome/components/api/api_pb2.cpp index 6bc434b658..c7b88bb312 100644 --- a/esphome/components/api/api_pb2.cpp +++ b/esphome/components/api/api_pb2.cpp @@ -471,8 +471,10 @@ void ListEntitiesLightResponse::encode(ProtoWriteBuffer buffer) const { buffer.encode_string(1, this->object_id_ref_); buffer.encode_fixed32(2, this->key); buffer.encode_string(3, this->name_ref_); - for (auto &it : this->supported_color_modes) { - buffer.encode_uint32(12, static_cast(it), true); + for (uint8_t bit = 0; bit < 32; bit++) { + if (this->supported_color_modes & (1U << bit)) { + buffer.encode_uint32(12, bit, true); + } } buffer.encode_float(9, this->min_mireds); buffer.encode_float(10, this->max_mireds); @@ -492,9 +494,11 @@ void ListEntitiesLightResponse::calculate_size(ProtoSize &size) const { size.add_length(1, this->object_id_ref_.size()); size.add_fixed32(1, this->key); size.add_length(1, this->name_ref_.size()); - if (!this->supported_color_modes.empty()) { - for (const auto &it : this->supported_color_modes) { - size.add_uint32_force(1, static_cast(it)); + if (this->supported_color_modes != 0) { + for (uint8_t bit = 0; bit < 32; bit++) { + if (this->supported_color_modes & (1U << bit)) { + size.add_uint32_force(1, static_cast(bit)); + } } } size.add_float(1, this->min_mireds); diff --git a/esphome/components/api/api_pb2.h b/esphome/components/api/api_pb2.h index 528b8fc108..5b86b7f276 100644 --- a/esphome/components/api/api_pb2.h +++ b/esphome/components/api/api_pb2.h @@ -790,7 +790,7 @@ class ListEntitiesLightResponse final : public InfoResponseProtoMessage { #ifdef HAS_PROTO_MESSAGE_DUMP const char *message_name() const override { return "list_entities_light_response"; } #endif - FixedVector supported_color_modes{}; + uint32_t supported_color_modes{}; float min_mireds{0.0f}; float max_mireds{0.0f}; std::vector effects{}; diff --git a/esphome/components/api/api_pb2_dump.cpp b/esphome/components/api/api_pb2_dump.cpp index cda68ceee8..f9f45ad071 100644 --- a/esphome/components/api/api_pb2_dump.cpp +++ b/esphome/components/api/api_pb2_dump.cpp @@ -913,9 +913,9 @@ void ListEntitiesLightResponse::dump_to(std::string &out) const { dump_field(out, "object_id", this->object_id_ref_); dump_field(out, "key", this->key); dump_field(out, "name", this->name_ref_); - for (const auto &it : this->supported_color_modes) { - dump_field(out, "supported_color_modes", static_cast(it), 4); - } + out.append(" supported_color_modes: 0x"); + out.append(uint32_to_string(this->supported_color_modes)); + out.append("\n"); dump_field(out, "min_mireds", this->min_mireds); dump_field(out, "max_mireds", this->max_mireds); for (const auto &it : this->effects) { diff --git a/script/api_protobuf/api_protobuf.py b/script/api_protobuf/api_protobuf.py index 4936434fc2..9e140ca9ce 100755 --- a/script/api_protobuf/api_protobuf.py +++ b/script/api_protobuf/api_protobuf.py @@ -1418,6 +1418,8 @@ class RepeatedTypeInfo(TypeInfo): self._use_pointer = bool(self._container_type) # Check if this should use FixedVector instead of std::vector self._use_fixed_vector = get_field_opt(field, pb.fixed_vector, False) + # Check if this should be encoded as a bitmask + self._use_bitmask = get_field_opt(field, pb.enum_as_bitmask, False) # For repeated fields, we need to get the base type info # but we can't call create_field_type_info as it would cause recursion @@ 
-1434,6 +1436,9 @@ class RepeatedTypeInfo(TypeInfo): @property def cpp_type(self) -> str: + if self._use_bitmask: + # For bitmask fields, store as a single uint32_t + return "uint32_t" if self._use_pointer and self._container_type: # For pointer fields, use the specified container type # If the container type already includes the element type (e.g., std::set) @@ -1466,6 +1471,12 @@ class RepeatedTypeInfo(TypeInfo): # Pointer fields don't support decoding if self._use_pointer: return None + if self._use_bitmask: + # For bitmask fields, decode enum value and set corresponding bit + content = self._ti.decode_varint + if content is None: + return None + return f"case {self.number}: this->{self.field_name} |= (1U << static_cast({content})); break;" content = self._ti.decode_varint if content is None: return None @@ -1519,6 +1530,18 @@ class RepeatedTypeInfo(TypeInfo): @property def encode_content(self) -> str: + if self._use_bitmask: + # For bitmask fields, iterate through set bits and encode each enum value + # The bitmask is stored as uint32_t where bit N represents enum value N + assert isinstance(self._ti, EnumType), ( + "enum_as_bitmask only works with enum fields" + ) + o = "for (uint8_t bit = 0; bit < 32; bit++) {\n" + o += f" if (this->{self.field_name} & (1U << bit)) {{\n" + o += f" buffer.{self._ti.encode_func}({self.number}, bit, true);\n" + o += " }\n" + o += "}" + return o if self._use_pointer: # For pointer fields, just dereference (pointer should never be null in our use case) o = f"for (const auto &it : *this->{self.field_name}) {{\n" @@ -1538,6 +1561,13 @@ class RepeatedTypeInfo(TypeInfo): @property def dump_content(self) -> str: + if self._use_bitmask: + # For bitmask fields, dump the hex value of the bitmask + return ( + f'out.append(" {self.field_name}: 0x");\n' + f"out.append(uint32_to_string(this->{self.field_name}));\n" + f'out.append("\\n");' + ) if self._use_pointer: # For pointer fields, dereference and use the existing helper return _generate_array_dump_content( @@ -1554,6 +1584,18 @@ class RepeatedTypeInfo(TypeInfo): # For repeated fields, we always need to pass force=True to the underlying type's calculation # This is because the encode method always sets force=true for repeated fields + if self._use_bitmask: + # For bitmask fields, iterate through set bits and calculate size + # Each set bit encodes one enum value (as varint) + o = f"if ({name} != 0) {{\n" + o += " for (uint8_t bit = 0; bit < 32; bit++) {\n" + o += f" if ({name} & (1U << bit)) {{\n" + o += f" {self._ti.get_size_calculation('bit', True)}\n" + o += " }\n" + o += " }\n" + o += "}" + return o + # Handle message types separately as they use a dedicated helper if isinstance(self._ti, MessageType): field_id_size = self._ti.calculate_field_id_size() From b01ab914f3ea0077d17a2554b925666e1f7bad9a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 10:01:39 -1000 Subject: [PATCH 088/336] tweak --- esphome/components/api/api_connection.cpp | 5 +---- esphome/components/light/color_mode.h | 3 +++ 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index 32b0f0d953..2a570b53e8 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -478,10 +478,7 @@ uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *c ListEntitiesLightResponse msg; auto traits = light->get_traits(); const auto &color_modes_mask = traits.get_supported_color_modes(); - msg.supported_color_modes.init(color_modes_mask.size()); - for (auto mode : color_modes_mask) { - msg.supported_color_modes.push_back(static_cast(mode)); - } + msg.supported_color_modes = color_modes_mask.get_mask(); if (traits.supports_color_capability(light::ColorCapability::COLOR_TEMPERATURE) || traits.supports_color_capability(light::ColorCapability::COLD_WARM_WHITE)) { msg.min_mireds = traits.get_min_mireds(); diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index d58ab73fdf..fa3a0aaaac 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -169,6 +169,9 @@ class ColorModeMask { constexpr Iterator begin() const { return Iterator(mask_, 0); } constexpr Iterator end() const { return Iterator(mask_, 16); } + /// Get the raw bitmask value for API encoding + constexpr uint16_t get_mask() const { return this->mask_; } + private: uint16_t mask_{0}; From c0c30ba22dbe7fa4c687e0d80153865d2f8763f1 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 10:02:45 -1000 Subject: [PATCH 089/336] tweak --- esphome/components/api/api_connection.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index 2a570b53e8..74509691af 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -477,8 +477,7 @@ uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *c auto *light = static_cast(entity); ListEntitiesLightResponse msg; auto traits = light->get_traits(); - const auto &color_modes_mask = traits.get_supported_color_modes(); - msg.supported_color_modes = color_modes_mask.get_mask(); + msg.supported_color_modes = traits.get_supported_color_modes().get_mask(); if (traits.supports_color_capability(light::ColorCapability::COLOR_TEMPERATURE) || traits.supports_color_capability(light::ColorCapability::COLD_WARM_WHITE)) { msg.min_mireds = traits.get_min_mireds(); From 2dc6c56edce33e3f10f0c214294f729b29b3dfb0 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 10:15:32 -1000 Subject: [PATCH 090/336] align --- esphome/components/light/light_traits.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/light/light_traits.h b/esphome/components/light/light_traits.h index 94f1301694..0db028598c 100644 --- a/esphome/components/light/light_traits.h +++ b/esphome/components/light/light_traits.h @@ -61,9 +61,9 @@ class LightTraits { void set_max_mireds(float max_mireds) { this->max_mireds_ = max_mireds; } protected: - ColorModeMask supported_color_modes_{}; float min_mireds_{0}; float max_mireds_{0}; + ColorModeMask supported_color_modes_{}; }; } // namespace light From 599e636468758f9b415b0963eb0f314a88895839 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 10:17:52 -1000 Subject: [PATCH 091/336] comment --- esphome/components/light/color_mode.h | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index fa3a0aaaac..d9fdc24d35 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -173,6 +173,10 @@ class ColorModeMask { constexpr uint16_t get_mask() const { return this->mask_; } private: + // Using uint16_t instead of uint32_t for more efficient iteration (fewer bits to scan). + // Currently only 10 ColorMode values exist, so 16 bits is sufficient. + // Can be changed to uint32_t if more than 16 color modes are needed in the future. + // Note: Due to struct padding, uint16_t and uint32_t result in same LightTraits size (12 bytes). uint16_t mask_{0}; /// Map ColorMode enum values to bit positions (0-9) From 957b5e98a78a00002ba1cdc2e4f44071e8ff667a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 10:18:34 -1000 Subject: [PATCH 092/336] comment --- esphome/components/light/color_mode.h | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index d9fdc24d35..77be58bb3b 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -141,13 +141,13 @@ class ColorModeMask { using pointer = const ColorMode *; using reference = ColorMode; - constexpr Iterator(uint16_t mask, int bit) : mask_(mask), bit_(bit) { advance_to_next_set_bit(); } + constexpr Iterator(uint16_t mask, int bit) : mask_(mask), bit_(bit) { advance_to_next_set_bit_(); } constexpr ColorMode operator*() const { return bit_to_mode(bit_); } constexpr Iterator &operator++() { ++bit_; - advance_to_next_set_bit(); + advance_to_next_set_bit_(); return *this; } @@ -156,7 +156,7 @@ class ColorModeMask { constexpr bool operator!=(const Iterator &other) const { return !(*this == other); } private: - constexpr void advance_to_next_set_bit() { + constexpr void advance_to_next_set_bit_() { while (bit_ < 16 && !(mask_ & (1 << bit_))) { ++bit_; } From cfb061abc423965b1a2474ce50c3e6e0ebb0c0c2 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 10:29:08 -1000 Subject: [PATCH 093/336] preen --- esphome/components/api/api_connection.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index 74509691af..f7ee0619c5 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -453,7 +453,6 @@ uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection * bool is_single) { auto *light = static_cast(entity); LightStateResponse resp; - auto traits = light->get_traits(); auto values = light->remote_values; auto color_mode = values.get_color_mode(); resp.state = values.is_on(); From 98df9fd2ff49f1750c8255e7de18b0c478ab73d9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 10:32:20 -1000 Subject: [PATCH 094/336] preen --- esphome/components/light/light_call.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/light/light_call.cpp b/esphome/components/light/light_call.cpp index 3e4e449614..4e6251492d 100644 --- a/esphome/components/light/light_call.cpp +++ b/esphome/components/light/light_call.cpp @@ -406,7 +406,7 @@ void LightCall::transform_parameters_() { } } ColorMode LightCall::compute_color_mode_() { - auto supported_modes = this->parent_->get_traits().get_supported_color_modes(); + const auto &supported_modes = this->parent_->get_traits().get_supported_color_modes(); int supported_count = supported_modes.size(); // Some lights don't support any color modes (e.g. monochromatic light), leave it at unknown. From 13e9d0c85173cd983bdf256b32b7a90fbccb3cd7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 10:41:37 -1000 Subject: [PATCH 095/336] fix --- esphome/components/api/api_pb2_dump.cpp | 6 +++--- script/api_protobuf/api_protobuf.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/esphome/components/api/api_pb2_dump.cpp b/esphome/components/api/api_pb2_dump.cpp index f9f45ad071..c47d95ed5d 100644 --- a/esphome/components/api/api_pb2_dump.cpp +++ b/esphome/components/api/api_pb2_dump.cpp @@ -913,9 +913,9 @@ void ListEntitiesLightResponse::dump_to(std::string &out) const { dump_field(out, "object_id", this->object_id_ref_); dump_field(out, "key", this->key); dump_field(out, "name", this->name_ref_); - out.append(" supported_color_modes: 0x"); - out.append(uint32_to_string(this->supported_color_modes)); - out.append("\n"); + char buffer[32]; + snprintf(buffer, sizeof(buffer), " supported_color_modes: 0x%08" PRIX32 "\n", this->supported_color_modes); + out.append(buffer); dump_field(out, "min_mireds", this->min_mireds); dump_field(out, "max_mireds", this->max_mireds); for (const auto &it : this->effects) { diff --git a/script/api_protobuf/api_protobuf.py b/script/api_protobuf/api_protobuf.py index 9e140ca9ce..8a841354a9 100755 --- a/script/api_protobuf/api_protobuf.py +++ b/script/api_protobuf/api_protobuf.py @@ -1564,9 +1564,9 @@ class RepeatedTypeInfo(TypeInfo): if self._use_bitmask: # For bitmask fields, dump the hex value of the bitmask return ( - f'out.append(" {self.field_name}: 0x");\n' - f"out.append(uint32_to_string(this->{self.field_name}));\n" - f'out.append("\\n");' + f"char buffer[32];\n" + f'snprintf(buffer, sizeof(buffer), " {self.field_name}: 0x%08" PRIX32 "\\n", this->{self.field_name});\n' + f"out.append(buffer);" ) if self._use_pointer: # For pointer fields, dereference and use the existing helper From 
596ce599918da9c749bdf1e866ff67ec5334860f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 10:42:51 -1000 Subject: [PATCH 096/336] dead code --- esphome/components/light/light_json_schema.cpp | 1 - esphome/components/light/light_state.cpp | 3 --- 2 files changed, 4 deletions(-) diff --git a/esphome/components/light/light_json_schema.cpp b/esphome/components/light/light_json_schema.cpp index 010e130612..e754c453b5 100644 --- a/esphome/components/light/light_json_schema.cpp +++ b/esphome/components/light/light_json_schema.cpp @@ -43,7 +43,6 @@ void LightJSONSchema::dump_json(LightState &state, JsonObject root) { } auto values = state.remote_values; - auto traits = state.get_output()->get_traits(); const auto color_mode = values.get_color_mode(); const char *mode_str = get_color_mode_json_str(color_mode); diff --git a/esphome/components/light/light_state.cpp b/esphome/components/light/light_state.cpp index 1d139e49e7..979dc2f5a1 100644 --- a/esphome/components/light/light_state.cpp +++ b/esphome/components/light/light_state.cpp @@ -191,11 +191,9 @@ void LightState::current_values_as_brightness(float *brightness) { this->current_values.as_brightness(brightness, this->gamma_correct_); } void LightState::current_values_as_rgb(float *red, float *green, float *blue, bool color_interlock) { - auto traits = this->get_traits(); this->current_values.as_rgb(red, green, blue, this->gamma_correct_, false); } void LightState::current_values_as_rgbw(float *red, float *green, float *blue, float *white, bool color_interlock) { - auto traits = this->get_traits(); this->current_values.as_rgbw(red, green, blue, white, this->gamma_correct_, false); } void LightState::current_values_as_rgbww(float *red, float *green, float *blue, float *cold_white, float *warm_white, @@ -209,7 +207,6 @@ void LightState::current_values_as_rgbct(float *red, float *green, float *blue, white_brightness, this->gamma_correct_); } void LightState::current_values_as_cwww(float *cold_white, float *warm_white, bool constant_brightness) { - auto traits = this->get_traits(); this->current_values.as_cwww(cold_white, warm_white, this->gamma_correct_, constant_brightness); } void LightState::current_values_as_ct(float *color_temperature, float *white_brightness) { From 27b876df932908d3390167bb345baae846f33744 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 10:52:42 -1000 Subject: [PATCH 097/336] preen --- esphome/components/api/api_pb2_dump.cpp | 2 +- script/api_protobuf/api_protobuf.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/esphome/components/api/api_pb2_dump.cpp b/esphome/components/api/api_pb2_dump.cpp index c47d95ed5d..69143f50f8 100644 --- a/esphome/components/api/api_pb2_dump.cpp +++ b/esphome/components/api/api_pb2_dump.cpp @@ -913,7 +913,7 @@ void ListEntitiesLightResponse::dump_to(std::string &out) const { dump_field(out, "object_id", this->object_id_ref_); dump_field(out, "key", this->key); dump_field(out, "name", this->name_ref_); - char buffer[32]; + char buffer[64]; snprintf(buffer, sizeof(buffer), " supported_color_modes: 0x%08" PRIX32 "\n", this->supported_color_modes); out.append(buffer); dump_field(out, "min_mireds", this->min_mireds); diff --git a/script/api_protobuf/api_protobuf.py b/script/api_protobuf/api_protobuf.py index 8a841354a9..2fe6d01024 100755 --- a/script/api_protobuf/api_protobuf.py +++ b/script/api_protobuf/api_protobuf.py @@ -1564,7 +1564,7 @@ class RepeatedTypeInfo(TypeInfo): if self._use_bitmask: # For bitmask fields, dump the hex value of the bitmask return ( - f"char buffer[32];\n" + f"char buffer[64];\n" f'snprintf(buffer, sizeof(buffer), " {self.field_name}: 0x%08" PRIX32 "\\n", this->{self.field_name});\n' f"out.append(buffer);" ) From ef52ce4d76e5a5c5d469b64b79bd7d83a15b349c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 11:56:40 -1000 Subject: [PATCH 098/336] [api_protobuf] Address copilot review: add bounds checking and clarify 32-bit loop intent - Add bounds checking in decode_varint_content to prevent undefined behavior if decoded enum value exceeds 31 - Add clarifying comments that 32-bit loops in encode_content and get_size_calculation are intentional to support the full range of enum_as_bitmask (enums with up to 32 values) - The uint32_t storage type supports general-purpose enum_as_bitmask, not just ColorMode's 10 values --- script/api_protobuf/api_protobuf.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/script/api_protobuf/api_protobuf.py b/script/api_protobuf/api_protobuf.py index 2fe6d01024..f423097b7f 100755 --- a/script/api_protobuf/api_protobuf.py +++ b/script/api_protobuf/api_protobuf.py @@ -1472,11 +1472,16 @@ class RepeatedTypeInfo(TypeInfo): if self._use_pointer: return None if self._use_bitmask: - # For bitmask fields, decode enum value and set corresponding bit + # For bitmask fields, decode enum value and set corresponding bit, with bounds checking content = self._ti.decode_varint if content is None: return None - return f"case {self.number}: this->{self.field_name} |= (1U << static_cast({content})); break;" + return ( + f"case {self.number}: " + f"if (static_cast({content}) <= 31) " + f"this->{self.field_name} |= (1U << static_cast({content})); " + f"break;" + ) content = self._ti.decode_varint if content is None: return None @@ -1533,6 +1538,9 @@ class RepeatedTypeInfo(TypeInfo): if self._use_bitmask: # For bitmask fields, iterate through set bits and encode each enum value # The bitmask is stored as uint32_t where bit N represents enum value N + # Note: We iterate through all 32 bits to support the full range of enum_as_bitmask + # (enums with up to 32 values). Specific uses may have fewer values, but the + # generated code is general-purpose. 
assert isinstance(self._ti, EnumType), ( "enum_as_bitmask only works with enum fields" ) @@ -1587,6 +1595,9 @@ class RepeatedTypeInfo(TypeInfo): if self._use_bitmask: # For bitmask fields, iterate through set bits and calculate size # Each set bit encodes one enum value (as varint) + # Note: We iterate through all 32 bits to support the full range of enum_as_bitmask + # (enums with up to 32 values). Specific uses may have fewer values, but the + # generated code is general-purpose. o = f"if ({name} != 0) {{\n" o += " for (uint8_t bit = 0; bit < 32; bit++) {\n" o += f" if ({name} & (1U << bit)) {{\n" From 02b626ae1a9136e5f9c9fcb8720c2fd4ac536b2e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 12:00:29 -1000 Subject: [PATCH 099/336] fix --- script/api_protobuf/api_protobuf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/script/api_protobuf/api_protobuf.py b/script/api_protobuf/api_protobuf.py index f423097b7f..0f3505f657 100755 --- a/script/api_protobuf/api_protobuf.py +++ b/script/api_protobuf/api_protobuf.py @@ -1478,7 +1478,7 @@ class RepeatedTypeInfo(TypeInfo): return None return ( f"case {self.number}: " - f"if (static_cast({content}) <= 31) " + f"if (static_cast({content}) < 32) " f"this->{self.field_name} |= (1U << static_cast({content})); " f"break;" ) From f88cc33cfc784ff9565a82740bed16ee8290a46e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 12:01:57 -1000 Subject: [PATCH 100/336] fix --- script/api_protobuf/api_protobuf.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/script/api_protobuf/api_protobuf.py b/script/api_protobuf/api_protobuf.py index 0f3505f657..075efe88f9 100755 --- a/script/api_protobuf/api_protobuf.py +++ b/script/api_protobuf/api_protobuf.py @@ -1472,15 +1472,9 @@ class RepeatedTypeInfo(TypeInfo): if self._use_pointer: return None if self._use_bitmask: - # For bitmask fields, decode enum value and set corresponding bit, with bounds checking - content = self._ti.decode_varint - if content is None: - return None - return ( - f"case {self.number}: " - f"if (static_cast({content}) < 32) " - f"this->{self.field_name} |= (1U << static_cast({content})); " - f"break;" + # Bitmask fields don't support decoding (only used for device->client messages) + raise RuntimeError( + f"enum_as_bitmask fields do not support decoding: {self.field_name}" ) content = self._ti.decode_varint if content is None: From 753bebdde8ade56c58d223698f9c62a3e2914a8a Mon Sep 17 00:00:00 2001 From: "J. 
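[Example sketch, not part of the patch] A hedged illustration of the bounds check discussed in the commit message above; set_mode_bit is a stand-in, not the generated decode code. Bit N of a uint32_t may only be set when N < 32, because shifting a 32-bit value by 32 or more is undefined behavior in C++.

    #include <cstdint>
    #include <cstdio>

    // Set the bit for a decoded enum value, ignoring out-of-range values.
    void set_mode_bit(uint32_t &mask, uint32_t decoded_enum_value) {
      if (decoded_enum_value < 32) {  // guard: 1U << 32 would be undefined behavior
        mask |= (1U << decoded_enum_value);
      }
      // Values >= 32 are silently dropped rather than corrupting the mask.
    }

    int main() {
      uint32_t mask = 0;
      set_mode_bit(mask, 6);   // valid: sets bit 6
      set_mode_bit(mask, 40);  // ignored: does not fit in a 32-bit mask
      std::printf("mask=0x%08X\n", static_cast<unsigned>(mask));  // prints mask=0x00000040
      return 0;
    }

The series ultimately drops decode support for these fields altogether, since they are only sent device-to-client, but the guard is the general fix wherever decoding is kept.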
Nick Koston" Date: Sat, 18 Oct 2025 12:02:52 -1000 Subject: [PATCH 101/336] fix --- esphome/components/api/api_connection.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index f7ee0619c5..8be96c641b 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -476,6 +476,8 @@ uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *c auto *light = static_cast(entity); ListEntitiesLightResponse msg; auto traits = light->get_traits(); + // msg.supported_color_modes is uint32_t, but get_mask() returns uint16_t + // The upper 16 bits are zero-extended during assignment (ColorMode only has 10 values) msg.supported_color_modes = traits.get_supported_color_modes().get_mask(); if (traits.supports_color_capability(light::ColorCapability::COLOR_TEMPERATURE) || traits.supports_color_capability(light::ColorCapability::COLD_WARM_WHITE)) { From e27472b87db519abd0bb8cfdafb8ca0fb691ef40 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 12:35:13 -1000 Subject: [PATCH 102/336] fixes --- esphome/components/api/api.proto | 2 +- esphome/components/api/api_connection.cpp | 5 +- esphome/components/api/api_options.proto | 17 +++--- esphome/components/api/api_pb2.cpp | 14 ++--- esphome/components/api/api_pb2.h | 2 +- esphome/components/api/api_pb2_dump.cpp | 6 +-- esphome/components/light/color_mode.h | 2 + script/api_protobuf/api_protobuf.py | 65 ++++++----------------- 8 files changed, 38 insertions(+), 75 deletions(-) diff --git a/esphome/components/api/api.proto b/esphome/components/api/api.proto index c64fc038d6..d202486cfa 100644 --- a/esphome/components/api/api.proto +++ b/esphome/components/api/api.proto @@ -506,7 +506,7 @@ message ListEntitiesLightResponse { string name = 3; reserved 4; // Deprecated: was string unique_id - repeated ColorMode supported_color_modes = 12 [(enum_as_bitmask) = true]; + repeated ColorMode supported_color_modes = 12 [(container_pointer_no_template) = "light::ColorModeMask"]; // next four supports_* are for legacy clients, newer clients should use color modes // Deprecated in API version 1.6 bool legacy_supports_brightness = 5 [deprecated=true]; diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index 8be96c641b..c8a1d85ef1 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -476,9 +476,8 @@ uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *c auto *light = static_cast(entity); ListEntitiesLightResponse msg; auto traits = light->get_traits(); - // msg.supported_color_modes is uint32_t, but get_mask() returns uint16_t - // The upper 16 bits are zero-extended during assignment (ColorMode only has 10 values) - msg.supported_color_modes = traits.get_supported_color_modes().get_mask(); + // Pass pointer to ColorModeMask so the iterator can encode actual ColorMode enum values + msg.supported_color_modes = &traits.get_supported_color_modes(); if (traits.supports_color_capability(light::ColorCapability::COLOR_TEMPERATURE) || traits.supports_color_capability(light::ColorCapability::COLD_WARM_WHITE)) { msg.min_mireds = traits.get_min_mireds(); diff --git a/esphome/components/api/api_options.proto b/esphome/components/api/api_options.proto index 450b5e83de..6b33408e2f 100644 --- a/esphome/components/api/api_options.proto +++ b/esphome/components/api/api_options.proto @@ -71,12 +71,13 
@@ extend google.protobuf.FieldOptions { // and is ideal when the exact size is known before populating the array. optional bool fixed_vector = 50013 [default=false]; - // enum_as_bitmask: Encode repeated enum fields as a uint32_t bitmask - // When set on a repeated enum field, the field will be stored as a single uint32_t - // where each bit represents whether that enum value is present. This is ideal for - // enums with ≤32 values and eliminates all vector template instantiation overhead. - // The enum values should be sequential starting from 0. - // Encoding: bit N set means enum value N is present in the set. - // Example: {ColorMode::RGB, ColorMode::WHITE} → bitmask with bits 5 and 6 set - optional bool enum_as_bitmask = 50014 [default=false]; + // container_pointer_no_template: Use a non-template container type for repeated fields + // Similar to container_pointer, but for containers that don't take template parameters. + // The container type is used as-is without appending element type. + // The container must have: + // - begin() and end() methods returning iterators + // - empty() method + // Example: [(container_pointer_no_template) = "light::ColorModeMask"] + // generates: const light::ColorModeMask *supported_color_modes{}; + optional string container_pointer_no_template = 50014; } diff --git a/esphome/components/api/api_pb2.cpp b/esphome/components/api/api_pb2.cpp index c7b88bb312..37bcf5d8a0 100644 --- a/esphome/components/api/api_pb2.cpp +++ b/esphome/components/api/api_pb2.cpp @@ -471,10 +471,8 @@ void ListEntitiesLightResponse::encode(ProtoWriteBuffer buffer) const { buffer.encode_string(1, this->object_id_ref_); buffer.encode_fixed32(2, this->key); buffer.encode_string(3, this->name_ref_); - for (uint8_t bit = 0; bit < 32; bit++) { - if (this->supported_color_modes & (1U << bit)) { - buffer.encode_uint32(12, bit, true); - } + for (const auto &it : *this->supported_color_modes) { + buffer.encode_uint32(12, static_cast(it), true); } buffer.encode_float(9, this->min_mireds); buffer.encode_float(10, this->max_mireds); @@ -494,11 +492,9 @@ void ListEntitiesLightResponse::calculate_size(ProtoSize &size) const { size.add_length(1, this->object_id_ref_.size()); size.add_fixed32(1, this->key); size.add_length(1, this->name_ref_.size()); - if (this->supported_color_modes != 0) { - for (uint8_t bit = 0; bit < 32; bit++) { - if (this->supported_color_modes & (1U << bit)) { - size.add_uint32_force(1, static_cast(bit)); - } + if (!this->supported_color_modes->empty()) { + for (const auto &it : *this->supported_color_modes) { + size.add_uint32_force(1, static_cast(it)); } } size.add_float(1, this->min_mireds); diff --git a/esphome/components/api/api_pb2.h b/esphome/components/api/api_pb2.h index 5b86b7f276..ed49498176 100644 --- a/esphome/components/api/api_pb2.h +++ b/esphome/components/api/api_pb2.h @@ -790,7 +790,7 @@ class ListEntitiesLightResponse final : public InfoResponseProtoMessage { #ifdef HAS_PROTO_MESSAGE_DUMP const char *message_name() const override { return "list_entities_light_response"; } #endif - uint32_t supported_color_modes{}; + const light::ColorModeMask *supported_color_modes{}; float min_mireds{0.0f}; float max_mireds{0.0f}; std::vector effects{}; diff --git a/esphome/components/api/api_pb2_dump.cpp b/esphome/components/api/api_pb2_dump.cpp index 69143f50f8..e803125f53 100644 --- a/esphome/components/api/api_pb2_dump.cpp +++ b/esphome/components/api/api_pb2_dump.cpp @@ -913,9 +913,9 @@ void ListEntitiesLightResponse::dump_to(std::string &out) const { 
dump_field(out, "object_id", this->object_id_ref_); dump_field(out, "key", this->key); dump_field(out, "name", this->name_ref_); - char buffer[64]; - snprintf(buffer, sizeof(buffer), " supported_color_modes: 0x%08" PRIX32 "\n", this->supported_color_modes); - out.append(buffer); + for (const auto &it : *this->supported_color_modes) { + dump_field(out, "supported_color_modes", static_cast(it), 4); + } dump_field(out, "min_mireds", this->min_mireds); dump_field(out, "max_mireds", this->max_mireds); for (const auto &it : this->effects) { diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 77be58bb3b..1241d59627 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -132,6 +132,8 @@ class ColorModeMask { return count; } + constexpr bool empty() const { return this->mask_ == 0; } + /// Iterator support for API encoding class Iterator { public: diff --git a/script/api_protobuf/api_protobuf.py b/script/api_protobuf/api_protobuf.py index 075efe88f9..2f83b0bd79 100755 --- a/script/api_protobuf/api_protobuf.py +++ b/script/api_protobuf/api_protobuf.py @@ -1415,11 +1415,15 @@ class RepeatedTypeInfo(TypeInfo): super().__init__(field) # Check if this is a pointer field by looking for container_pointer option self._container_type = get_field_opt(field, pb.container_pointer, "") - self._use_pointer = bool(self._container_type) + # Check for non-template container pointer + self._container_no_template = get_field_opt( + field, pb.container_pointer_no_template, "" + ) + self._use_pointer = bool(self._container_type) or bool( + self._container_no_template + ) # Check if this should use FixedVector instead of std::vector self._use_fixed_vector = get_field_opt(field, pb.fixed_vector, False) - # Check if this should be encoded as a bitmask - self._use_bitmask = get_field_opt(field, pb.enum_as_bitmask, False) # For repeated fields, we need to get the base type info # but we can't call create_field_type_info as it would cause recursion @@ -1436,15 +1440,18 @@ class RepeatedTypeInfo(TypeInfo): @property def cpp_type(self) -> str: - if self._use_bitmask: - # For bitmask fields, store as a single uint32_t - return "uint32_t" + if self._container_no_template: + # Non-template container: use type as-is without appending template parameters + return f"const {self._container_no_template}*" if self._use_pointer and self._container_type: # For pointer fields, use the specified container type - # If the container type already includes the element type (e.g., std::set) - # use it as-is, otherwise append the element type + # Two cases: + # 1. "std::set" - Full type with template params, use as-is + # 2. 
"std::set" - No <>, append the element type if "<" in self._container_type and ">" in self._container_type: + # Has template parameters specified, use as-is return f"const {self._container_type}*" + # No <> at all, append element type return f"const {self._container_type}<{self._ti.cpp_type}>*" if self._use_fixed_vector: return f"FixedVector<{self._ti.cpp_type}>" @@ -1471,11 +1478,6 @@ class RepeatedTypeInfo(TypeInfo): # Pointer fields don't support decoding if self._use_pointer: return None - if self._use_bitmask: - # Bitmask fields don't support decoding (only used for device->client messages) - raise RuntimeError( - f"enum_as_bitmask fields do not support decoding: {self.field_name}" - ) content = self._ti.decode_varint if content is None: return None @@ -1529,21 +1531,6 @@ class RepeatedTypeInfo(TypeInfo): @property def encode_content(self) -> str: - if self._use_bitmask: - # For bitmask fields, iterate through set bits and encode each enum value - # The bitmask is stored as uint32_t where bit N represents enum value N - # Note: We iterate through all 32 bits to support the full range of enum_as_bitmask - # (enums with up to 32 values). Specific uses may have fewer values, but the - # generated code is general-purpose. - assert isinstance(self._ti, EnumType), ( - "enum_as_bitmask only works with enum fields" - ) - o = "for (uint8_t bit = 0; bit < 32; bit++) {\n" - o += f" if (this->{self.field_name} & (1U << bit)) {{\n" - o += f" buffer.{self._ti.encode_func}({self.number}, bit, true);\n" - o += " }\n" - o += "}" - return o if self._use_pointer: # For pointer fields, just dereference (pointer should never be null in our use case) o = f"for (const auto &it : *this->{self.field_name}) {{\n" @@ -1563,13 +1550,6 @@ class RepeatedTypeInfo(TypeInfo): @property def dump_content(self) -> str: - if self._use_bitmask: - # For bitmask fields, dump the hex value of the bitmask - return ( - f"char buffer[64];\n" - f'snprintf(buffer, sizeof(buffer), " {self.field_name}: 0x%08" PRIX32 "\\n", this->{self.field_name});\n' - f"out.append(buffer);" - ) if self._use_pointer: # For pointer fields, dereference and use the existing helper return _generate_array_dump_content( @@ -1586,21 +1566,6 @@ class RepeatedTypeInfo(TypeInfo): # For repeated fields, we always need to pass force=True to the underlying type's calculation # This is because the encode method always sets force=true for repeated fields - if self._use_bitmask: - # For bitmask fields, iterate through set bits and calculate size - # Each set bit encodes one enum value (as varint) - # Note: We iterate through all 32 bits to support the full range of enum_as_bitmask - # (enums with up to 32 values). Specific uses may have fewer values, but the - # generated code is general-purpose. - o = f"if ({name} != 0) {{\n" - o += " for (uint8_t bit = 0; bit < 32; bit++) {\n" - o += f" if ({name} & (1U << bit)) {{\n" - o += f" {self._ti.get_size_calculation('bit', True)}\n" - o += " }\n" - o += " }\n" - o += "}" - return o - # Handle message types separately as they use a dedicated helper if isinstance(self._ti, MessageType): field_id_size = self._ti.calculate_field_id_size() From 3ef402ef6409c8abbee9ed2f60324e5d92ca9d90 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 12:38:02 -1000 Subject: [PATCH 103/336] cover --- tests/integration/test_light_calls.py | 29 +++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/tests/integration/test_light_calls.py b/tests/integration/test_light_calls.py index af90ddbe86..152896ba88 100644 --- a/tests/integration/test_light_calls.py +++ b/tests/integration/test_light_calls.py @@ -8,6 +8,7 @@ import asyncio from typing import Any from aioesphomeapi import LightState +from aioesphomeapi.model import ColorMode import pytest from .types import APIClientConnectedFactory, RunCompiledFunction @@ -40,6 +41,34 @@ async def test_light_calls( rgbcw_light = next(light for light in lights if "RGBCW" in light.name) rgb_light = next(light for light in lights if "RGB Light" in light.name) + # Test color mode encoding: Verify supported_color_modes contains actual ColorMode enum values + # not bit positions. This is critical - the bug was encoding bit position 6 instead of + # ColorMode.RGB (value 35). + + # RGB light should support RGB mode (ColorMode.RGB = 35) + assert ColorMode.RGB in rgb_light.supported_color_modes, ( + f"RGB light missing RGB color mode. Got: {rgb_light.supported_color_modes}" + ) + # Verify it's the actual enum value, not a bit position + assert 35 in [mode.value for mode in rgb_light.supported_color_modes], ( + f"RGB light has wrong color mode values. Expected 35 (RGB), got: " + f"{[mode.value for mode in rgb_light.supported_color_modes]}" + ) + + # RGBCW light should support multiple modes including RGB_COLD_WARM_WHITE (value 51) + assert ColorMode.RGB_COLD_WARM_WHITE in rgbcw_light.supported_color_modes, ( + f"RGBCW light missing RGB_COLD_WARM_WHITE mode. Got: {rgbcw_light.supported_color_modes}" + ) + # Verify actual enum values + expected_rgbcw_modes = { + ColorMode.RGB_COLD_WARM_WHITE, # 51 + # May have other modes too + } + assert expected_rgbcw_modes.issubset(set(rgbcw_light.supported_color_modes)), ( + f"RGBCW light missing expected color modes. Got: " + f"{[f'{mode.name}={mode.value}' for mode in rgbcw_light.supported_color_modes]}" + ) + async def wait_for_state_change(key: int, timeout: float = 1.0) -> Any: """Wait for a state change for the given entity key.""" loop = asyncio.get_event_loop() From 89903929f3da55a46840ba82dcd9a3f623807a8f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 13:05:59 -1000 Subject: [PATCH 104/336] preen --- esphome/components/light/color_mode.h | 95 ++++++++++++++++++------- esphome/components/light/light_call.cpp | 63 +++++++--------- esphome/components/light/light_call.h | 4 +- 3 files changed, 96 insertions(+), 66 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 1241d59627..059996b740 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -104,6 +104,9 @@ constexpr ColorModeHelper operator|(ColorModeHelper lhs, ColorMode rhs) { return static_cast(static_cast(lhs) | static_cast(rhs)); } +// Type alias for raw color mode bitmask values +using color_mode_bitmask_t = uint16_t; + /// Bitmask for storing a set of ColorMode values efficiently. /// Replaces std::set to eliminate red-black tree overhead (~586 bytes). 
class ColorModeMask { @@ -143,7 +146,7 @@ class ColorModeMask { using pointer = const ColorMode *; using reference = ColorMode; - constexpr Iterator(uint16_t mask, int bit) : mask_(mask), bit_(bit) { advance_to_next_set_bit_(); } + constexpr Iterator(color_mode_bitmask_t mask, int bit) : mask_(mask), bit_(bit) { advance_to_next_set_bit_(); } constexpr ColorMode operator*() const { return bit_to_mode(bit_); } @@ -159,52 +162,92 @@ class ColorModeMask { private: constexpr void advance_to_next_set_bit_() { - while (bit_ < 16 && !(mask_ & (1 << bit_))) { + while (bit_ < MAX_BIT_INDEX && !(mask_ & (1 << bit_))) { ++bit_; } } - uint16_t mask_; + color_mode_bitmask_t mask_; int bit_; }; constexpr Iterator begin() const { return Iterator(mask_, 0); } - constexpr Iterator end() const { return Iterator(mask_, 16); } + constexpr Iterator end() const { return Iterator(mask_, MAX_BIT_INDEX); } /// Get the raw bitmask value for API encoding - constexpr uint16_t get_mask() const { return this->mask_; } + constexpr color_mode_bitmask_t get_mask() const { return this->mask_; } + + /// Find the first set bit in a bitmask and return the corresponding ColorMode + /// Used for optimizing compute_color_mode_() intersection logic + static constexpr ColorMode first_mode_from_mask(color_mode_bitmask_t mask) { + // Find the position of the first set bit (least significant bit) + int bit = 0; + while (bit < MAX_BIT_INDEX && !(mask & (1 << bit))) { + ++bit; + } + return bit_to_mode(bit); + } + + /// Check if a ColorMode is present in a raw bitmask value + /// Useful for checking intersection results without creating a temporary ColorModeMask + static constexpr bool mask_contains(color_mode_bitmask_t mask, ColorMode mode) { + return (mask & (1 << mode_to_bit(mode))) != 0; + } + + /// Build a bitmask of modes that match the given capability requirements + /// @param require_caps Capabilities that must be present in the mode + /// @param exclude_caps Capabilities that must not be present in the mode (for none case) + /// @return Raw bitmask value + static constexpr color_mode_bitmask_t build_mask_matching(uint8_t require_caps, uint8_t exclude_caps = 0) { + color_mode_bitmask_t mask = 0; + // Check each mode to see if it matches the requirements + // Skip UNKNOWN (bit 0), iterate through actual color modes (bits 1-9) + for (int bit = 1; bit < COLOR_MODE_COUNT; ++bit) { + ColorMode mode = bit_to_mode(bit); + uint8_t mode_val = static_cast(mode); + // Mode matches if it has all required caps and none of the excluded caps + if ((mode_val & require_caps) == require_caps && (exclude_caps == 0 || (mode_val & exclude_caps) == 0)) { + mask |= (1 << bit); + } + } + return mask; + } private: // Using uint16_t instead of uint32_t for more efficient iteration (fewer bits to scan). // Currently only 10 ColorMode values exist, so 16 bits is sufficient. // Can be changed to uint32_t if more than 16 color modes are needed in the future. // Note: Due to struct padding, uint16_t and uint32_t result in same LightTraits size (12 bytes). 
- uint16_t mask_{0}; + color_mode_bitmask_t mask_{0}; + + // Constants for ColorMode count and bit range + static constexpr int COLOR_MODE_COUNT = 10; // UNKNOWN through RGB_COLD_WARM_WHITE + static constexpr int MAX_BIT_INDEX = sizeof(color_mode_bitmask_t) * 8; // Number of bits in bitmask type /// Map ColorMode enum values to bit positions (0-9) static constexpr int mode_to_bit(ColorMode mode) { // Using switch instead of lookup table to avoid RAM usage on ESP8266 // The compiler optimizes this efficiently switch (mode) { - case ColorMode::UNKNOWN: + case ColorMode::UNKNOWN: // 0 return 0; - case ColorMode::ON_OFF: + case ColorMode::ON_OFF: // 1 return 1; - case ColorMode::BRIGHTNESS: + case ColorMode::BRIGHTNESS: // 3 return 2; - case ColorMode::WHITE: + case ColorMode::WHITE: // 7 return 3; - case ColorMode::COLOR_TEMPERATURE: + case ColorMode::COLOR_TEMPERATURE: // 11 return 4; - case ColorMode::COLD_WARM_WHITE: + case ColorMode::COLD_WARM_WHITE: // 19 return 5; - case ColorMode::RGB: + case ColorMode::RGB: // 35 return 6; - case ColorMode::RGB_WHITE: + case ColorMode::RGB_WHITE: // 39 return 7; - case ColorMode::RGB_COLOR_TEMPERATURE: + case ColorMode::RGB_COLOR_TEMPERATURE: // 47 return 8; - case ColorMode::RGB_COLD_WARM_WHITE: + case ColorMode::RGB_COLD_WARM_WHITE: // 51 return 9; default: return 0; @@ -215,25 +258,25 @@ class ColorModeMask { // Using switch instead of lookup table to avoid RAM usage on ESP8266 switch (bit) { case 0: - return ColorMode::UNKNOWN; + return ColorMode::UNKNOWN; // 0 case 1: - return ColorMode::ON_OFF; + return ColorMode::ON_OFF; // 1 case 2: - return ColorMode::BRIGHTNESS; + return ColorMode::BRIGHTNESS; // 3 case 3: - return ColorMode::WHITE; + return ColorMode::WHITE; // 7 case 4: - return ColorMode::COLOR_TEMPERATURE; + return ColorMode::COLOR_TEMPERATURE; // 11 case 5: - return ColorMode::COLD_WARM_WHITE; + return ColorMode::COLD_WARM_WHITE; // 19 case 6: - return ColorMode::RGB; + return ColorMode::RGB; // 35 case 7: - return ColorMode::RGB_WHITE; + return ColorMode::RGB_WHITE; // 39 case 8: - return ColorMode::RGB_COLOR_TEMPERATURE; + return ColorMode::RGB_COLOR_TEMPERATURE; // 47 case 9: - return ColorMode::RGB_COLD_WARM_WHITE; + return ColorMode::RGB_COLD_WARM_WHITE; // 51 default: return ColorMode::UNKNOWN; } diff --git a/esphome/components/light/light_call.cpp b/esphome/components/light/light_call.cpp index 4e6251492d..fbc1b8f97d 100644 --- a/esphome/components/light/light_call.cpp +++ b/esphome/components/light/light_call.cpp @@ -425,20 +425,19 @@ ColorMode LightCall::compute_color_mode_() { // If no color mode is specified, we try to guess the color mode. This is needed for backward compatibility to // pre-colormode clients and automations, but also for the MQTT API, where HA doesn't let us know which color mode // was used for some reason. - ColorModeMask suitable_modes = this->get_suitable_color_modes_(); + // Compute intersection of suitable and supported modes using bitwise AND + color_mode_bitmask_t intersection = this->get_suitable_color_modes_mask_() & supported_modes.get_mask(); - // Don't change if the current mode is suitable. - if (suitable_modes.contains(current_mode)) { + // Don't change if the current mode is in the intersection (suitable AND supported) + if (ColorModeMask::mask_contains(intersection, current_mode)) { ESP_LOGI(TAG, "'%s': color mode not specified; retaining %s", this->parent_->get_name().c_str(), LOG_STR_ARG(color_mode_to_human(current_mode))); return current_mode; } // Use the preferred suitable mode. 
- for (auto mode : suitable_modes) { - if (!supported_modes.contains(mode)) - continue; - + if (intersection != 0) { + ColorMode mode = ColorModeMask::first_mode_from_mask(intersection); ESP_LOGI(TAG, "'%s': color mode not specified; using %s", this->parent_->get_name().c_str(), LOG_STR_ARG(color_mode_to_human(mode))); return mode; @@ -451,7 +450,7 @@ ColorMode LightCall::compute_color_mode_() { LOG_STR_ARG(color_mode_to_human(color_mode))); return color_mode; } -ColorModeMask LightCall::get_suitable_color_modes_() { +color_mode_bitmask_t LightCall::get_suitable_color_modes_mask_() { bool has_white = this->has_white() && this->white_ > 0.0f; bool has_ct = this->has_color_temperature(); bool has_cwww = @@ -459,39 +458,27 @@ ColorModeMask LightCall::get_suitable_color_modes_() { bool has_rgb = (this->has_color_brightness() && this->color_brightness_ > 0.0f) || (this->has_red() || this->has_green() || this->has_blue()); -// Build key from flags: [rgb][cwww][ct][white] -#define KEY(white, ct, cwww, rgb) ((white) << 0 | (ct) << 1 | (cwww) << 2 | (rgb) << 3) + // Build required capabilities mask + uint8_t require_caps = static_cast(ColorCapability::ON_OFF | ColorCapability::BRIGHTNESS); + if (has_rgb) + require_caps |= static_cast(ColorCapability::RGB); + if (has_white) + require_caps |= static_cast(ColorCapability::WHITE); + if (has_ct) + require_caps |= static_cast(ColorCapability::COLOR_TEMPERATURE); + if (has_cwww) + require_caps |= static_cast(ColorCapability::COLD_WARM_WHITE); - uint8_t key = KEY(has_white, has_ct, has_cwww, has_rgb); - - switch (key) { - case KEY(true, false, false, false): // white only - return {ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, - ColorMode::RGB_COLD_WARM_WHITE}; - case KEY(false, true, false, false): // ct only - return {ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, - ColorMode::RGB_COLD_WARM_WHITE}; - case KEY(true, true, false, false): // white + ct - return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; - case KEY(false, false, true, false): // cwww only - return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}; - case KEY(false, false, false, false): // none - return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE, ColorMode::RGB, - ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE}; - case KEY(true, false, false, true): // rgb + white - return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; - case KEY(false, true, false, true): // rgb + ct - case KEY(true, true, false, true): // rgb + white + ct - return {ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; - case KEY(false, false, true, true): // rgb + cwww - return {ColorMode::RGB_COLD_WARM_WHITE}; - case KEY(false, false, false, true): // rgb only - return {ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; - default: - return {}; // conflicting flags + // If no specific color parameters set, exclude modes with color capabilities + uint8_t exclude_caps = 0; + if (!has_rgb && !has_white && !has_ct && !has_cwww) { + // For "none" case, we want all modes but don't exclude anything + // Just require ON_OFF + BRIGHTNESS which all modes have + return ColorModeMask::build_mask_matching( + static_cast(ColorCapability::ON_OFF | ColorCapability::BRIGHTNESS), exclude_caps); } 
-#undef KEY + return ColorModeMask::build_mask_matching(require_caps, exclude_caps); } LightCall &LightCall::set_effect(const std::string &effect) { diff --git a/esphome/components/light/light_call.h b/esphome/components/light/light_call.h index e87ccd3efd..6931b58b9d 100644 --- a/esphome/components/light/light_call.h +++ b/esphome/components/light/light_call.h @@ -185,8 +185,8 @@ class LightCall { //// Compute the color mode that should be used for this call. ColorMode compute_color_mode_(); - /// Get potential color modes for this light call. - ColorModeMask get_suitable_color_modes_(); + /// Get potential color modes bitmask for this light call. + color_mode_bitmask_t get_suitable_color_modes_mask_(); /// Some color modes also can be set using non-native parameters, transform those calls. void transform_parameters_(); From 89c719d71d6e0a488dc05a889a162d8c51f45531 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 13:10:41 -1000 Subject: [PATCH 105/336] preen --- esphome/components/light/color_mode.h | 13 +++++++++++++ esphome/components/light/light_traits.h | 6 +----- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 059996b740..c542984d1b 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -194,6 +194,19 @@ class ColorModeMask { return (mask & (1 << mode_to_bit(mode))) != 0; } + /// Check if any mode in the bitmask has a specific capability + /// Used for checking if a light supports a capability (e.g., BRIGHTNESS, RGB) + bool has_capability(ColorCapability capability) const { + uint8_t cap_mask = static_cast(capability); + // Check each set bit to see if any mode has this capability + for (int bit = 1; bit < COLOR_MODE_COUNT; ++bit) { + if ((this->mask_ & (1 << bit)) && (static_cast(bit_to_mode(bit)) & cap_mask)) { + return true; + } + } + return false; + } + /// Build a bitmask of modes that match the given capability requirements /// @param require_caps Capabilities that must be present in the mode /// @param exclude_caps Capabilities that must not be present in the mode (for none case) diff --git a/esphome/components/light/light_traits.h b/esphome/components/light/light_traits.h index 0db028598c..c83d8ad2a9 100644 --- a/esphome/components/light/light_traits.h +++ b/esphome/components/light/light_traits.h @@ -28,11 +28,7 @@ class LightTraits { bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.contains(color_mode); } bool supports_color_capability(ColorCapability color_capability) const { - for (auto mode : this->supported_color_modes_) { - if (mode & color_capability) - return true; - } - return false; + return this->supported_color_modes_.has_capability(color_capability); } ESPDEPRECATED("get_supports_brightness() is deprecated, use color modes instead.", "v1.21") From ec8d8538f64e3b705012ccf70a8fc4ce708147d9 Mon Sep 17 00:00:00 2001 From: "J. 
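[Example sketch, not part of the patch] A minimal illustration of the intersection logic in compute_color_mode_() above, using arbitrary bit positions instead of real ColorMode values; pick_mode, find_first_set_bit and mask_contains are stand-ins. The idea: AND the "suitable" and "supported" bitmasks, keep the current mode when its bit survives, otherwise take the lowest surviving bit.

    #include <cstdint>
    #include <cstdio>

    using bitmask_t = uint16_t;

    constexpr int find_first_set_bit(bitmask_t mask) {
      int bit = 0;
      while (bit < 16 && !(mask & (1u << bit))) ++bit;
      return bit;  // 16 means "mask was empty"
    }

    constexpr bool mask_contains(bitmask_t mask, int bit) { return (mask & (1u << bit)) != 0; }

    int pick_mode(bitmask_t suitable, bitmask_t supported, int current_bit) {
      bitmask_t intersection = suitable & supported;  // modes that are both suitable and supported
      if (mask_contains(intersection, current_bit)) return current_bit;  // keep the current mode
      if (intersection != 0) return find_first_set_bit(intersection);    // otherwise use the preferred one
      return current_bit;  // nothing matches: this sketch leaves the mode unchanged
    }

    int main() {
      bitmask_t suitable = (1u << 4) | (1u << 5);
      bitmask_t supported = (1u << 2) | (1u << 5);
      std::printf("picked bit %d\n", pick_mode(suitable, supported, /*current_bit=*/2));  // prints 5
      return 0;
    }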
Nick Koston" Date: Sat, 18 Oct 2025 13:12:48 -1000 Subject: [PATCH 106/336] preen --- esphome/components/light/color_mode.h | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index c542984d1b..85e7a18406 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -198,9 +198,9 @@ class ColorModeMask { /// Used for checking if a light supports a capability (e.g., BRIGHTNESS, RGB) bool has_capability(ColorCapability capability) const { uint8_t cap_mask = static_cast(capability); - // Check each set bit to see if any mode has this capability - for (int bit = 1; bit < COLOR_MODE_COUNT; ++bit) { - if ((this->mask_ & (1 << bit)) && (static_cast(bit_to_mode(bit)) & cap_mask)) { + // Iterate through each mode and check if it has the capability + for (auto mode : *this) { + if (static_cast(mode) & cap_mask) { return true; } } From 80fd51e198a17c74c0b53af5566c7f79a8e8e0be Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 13:14:05 -1000 Subject: [PATCH 107/336] preen --- esphome/components/light/color_mode.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 85e7a18406..4fc65ac5f0 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -219,7 +219,7 @@ class ColorModeMask { ColorMode mode = bit_to_mode(bit); uint8_t mode_val = static_cast(mode); // Mode matches if it has all required caps and none of the excluded caps - if ((mode_val & require_caps) == require_caps && (exclude_caps == 0 || (mode_val & exclude_caps) == 0)) { + if ((mode_val & require_caps) == require_caps && (mode_val & exclude_caps) == 0) { mask |= (1 << bit); } } From cc6b798f2ba69bd01c9591dd3f7d6a044449be94 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 13:15:47 -1000 Subject: [PATCH 108/336] overkill --- esphome/components/light/color_mode.h | 7 +++---- esphome/components/light/light_call.cpp | 11 +---------- 2 files changed, 4 insertions(+), 14 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 4fc65ac5f0..a91df200c9 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -209,17 +209,16 @@ class ColorModeMask { /// Build a bitmask of modes that match the given capability requirements /// @param require_caps Capabilities that must be present in the mode - /// @param exclude_caps Capabilities that must not be present in the mode (for none case) /// @return Raw bitmask value - static constexpr color_mode_bitmask_t build_mask_matching(uint8_t require_caps, uint8_t exclude_caps = 0) { + static constexpr color_mode_bitmask_t build_mask_matching(uint8_t require_caps) { color_mode_bitmask_t mask = 0; // Check each mode to see if it matches the requirements // Skip UNKNOWN (bit 0), iterate through actual color modes (bits 1-9) for (int bit = 1; bit < COLOR_MODE_COUNT; ++bit) { ColorMode mode = bit_to_mode(bit); uint8_t mode_val = static_cast(mode); - // Mode matches if it has all required caps and none of the excluded caps - if ((mode_val & require_caps) == require_caps && (mode_val & exclude_caps) == 0) { + // Mode matches if it has all required caps + if ((mode_val & require_caps) == require_caps) { mask |= (1 << bit); } } diff --git a/esphome/components/light/light_call.cpp b/esphome/components/light/light_call.cpp index fbc1b8f97d..036de98639 100644 --- a/esphome/components/light/light_call.cpp +++ b/esphome/components/light/light_call.cpp @@ -469,16 +469,7 @@ color_mode_bitmask_t LightCall::get_suitable_color_modes_mask_() { if (has_cwww) require_caps |= static_cast(ColorCapability::COLD_WARM_WHITE); - // If no specific color parameters set, exclude modes with color capabilities - uint8_t exclude_caps = 0; - if (!has_rgb && !has_white && !has_ct && !has_cwww) { - // For "none" case, we want all modes but don't exclude anything - // Just require ON_OFF + BRIGHTNESS which all modes have - return ColorModeMask::build_mask_matching( - static_cast(ColorCapability::ON_OFF | ColorCapability::BRIGHTNESS), exclude_caps); - } - - return ColorModeMask::build_mask_matching(require_caps, exclude_caps); + return ColorModeMask::build_mask_matching(require_caps); } LightCall &LightCall::set_effect(const std::string &effect) { From 44d3f355a5b4e72b8115dc8df3abbb981b9b0047 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 13:16:52 -1000 Subject: [PATCH 109/336] overkill --- esphome/components/light/color_mode.h | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index a91df200c9..a154397aea 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -215,8 +215,7 @@ class ColorModeMask { // Check each mode to see if it matches the requirements // Skip UNKNOWN (bit 0), iterate through actual color modes (bits 1-9) for (int bit = 1; bit < COLOR_MODE_COUNT; ++bit) { - ColorMode mode = bit_to_mode(bit); - uint8_t mode_val = static_cast(mode); + uint8_t mode_val = static_cast(bit_to_mode(bit)); // Mode matches if it has all required caps if ((mode_val & require_caps) == require_caps) { mask |= (1 << bit); From 1c8b60891c9a3b9b6c8b4e9c011ef43a2c8c5751 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 13:32:48 -1000 Subject: [PATCH 110/336] simplify --- esphome/components/light/color_mode.h | 168 +++++++++++++++----------- 1 file changed, 99 insertions(+), 69 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index a154397aea..c2b1a860ec 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -107,6 +107,97 @@ constexpr ColorModeHelper operator|(ColorModeHelper lhs, ColorMode rhs) { // Type alias for raw color mode bitmask values using color_mode_bitmask_t = uint16_t; +// Constants for ColorMode count and bit range +static constexpr int COLOR_MODE_COUNT = 10; // UNKNOWN through RGB_COLD_WARM_WHITE +static constexpr int MAX_BIT_INDEX = sizeof(color_mode_bitmask_t) * 8; // Number of bits in bitmask type + +/// Map ColorMode enum values to bit positions (0-9) +static constexpr int mode_to_bit(ColorMode mode) { + // Using switch instead of lookup table to avoid RAM usage on ESP8266 + // The compiler optimizes this efficiently + switch (mode) { + case ColorMode::UNKNOWN: // 0 + return 0; + case ColorMode::ON_OFF: // 1 + return 1; + case ColorMode::BRIGHTNESS: // 3 + return 2; + case ColorMode::WHITE: // 7 + return 3; + case ColorMode::COLOR_TEMPERATURE: // 11 + return 4; + case ColorMode::COLD_WARM_WHITE: // 19 + return 5; + case ColorMode::RGB: // 35 + return 6; + case ColorMode::RGB_WHITE: // 39 + return 7; + case ColorMode::RGB_COLOR_TEMPERATURE: // 47 + return 8; + case ColorMode::RGB_COLD_WARM_WHITE: // 51 + return 9; + default: + return 0; + } +} + +static constexpr ColorMode bit_to_mode(int bit) { + // Using switch instead of lookup table to avoid RAM usage on ESP8266 + switch (bit) { + case 0: + return ColorMode::UNKNOWN; // 0 + case 1: + return ColorMode::ON_OFF; // 1 + case 2: + return ColorMode::BRIGHTNESS; // 3 + case 3: + return ColorMode::WHITE; // 7 + case 4: + return ColorMode::COLOR_TEMPERATURE; // 11 + case 5: + return ColorMode::COLD_WARM_WHITE; // 19 + case 6: + return ColorMode::RGB; // 35 + case 7: + return ColorMode::RGB_WHITE; // 39 + case 8: + return ColorMode::RGB_COLOR_TEMPERATURE; // 47 + case 9: + return ColorMode::RGB_COLD_WARM_WHITE; // 51 + default: + return ColorMode::UNKNOWN; + } +} + +/// Helper to compute capability bitmask at compile time +static constexpr color_mode_bitmask_t compute_capability_bitmask(ColorCapability capability) { + color_mode_bitmask_t mask = 0; + uint8_t cap_bit = static_cast(capability); + + // Check each ColorMode to see if it has this capability + for (int bit = 0; bit < 
COLOR_MODE_COUNT; ++bit) { + uint8_t mode_val = static_cast(bit_to_mode(bit)); + if ((mode_val & cap_bit) != 0) { + mask |= (1 << bit); + } + } + return mask; +} + +// Number of ColorCapability enum values +static constexpr int COLOR_CAPABILITY_COUNT = 6; + +/// Compile-time lookup table mapping ColorCapability to bitmask +/// This array is computed at compile time using constexpr +static constexpr color_mode_bitmask_t CAPABILITY_BITMASKS[] = { + compute_capability_bitmask(ColorCapability::ON_OFF), // 1 << 0 + compute_capability_bitmask(ColorCapability::BRIGHTNESS), // 1 << 1 + compute_capability_bitmask(ColorCapability::WHITE), // 1 << 2 + compute_capability_bitmask(ColorCapability::COLOR_TEMPERATURE), // 1 << 3 + compute_capability_bitmask(ColorCapability::COLD_WARM_WHITE), // 1 << 4 + compute_capability_bitmask(ColorCapability::RGB), // 1 << 5 +}; + /// Bitmask for storing a set of ColorMode values efficiently. /// Replaces std::set to eliminate red-black tree overhead (~586 bytes). class ColorModeMask { @@ -197,14 +288,15 @@ class ColorModeMask { /// Check if any mode in the bitmask has a specific capability /// Used for checking if a light supports a capability (e.g., BRIGHTNESS, RGB) bool has_capability(ColorCapability capability) const { - uint8_t cap_mask = static_cast(capability); - // Iterate through each mode and check if it has the capability - for (auto mode : *this) { - if (static_cast(mode) & cap_mask) { - return true; - } + // Convert capability bit to array index (log2 of the bit value) + uint8_t cap_bit = static_cast(capability); + // Count trailing zeros to get the bit position (0-5) + int index = 0; + while (index < COLOR_CAPABILITY_COUNT && !(cap_bit & (1 << index))) { + ++index; } - return false; + // Look up the pre-computed bitmask and check if any of our set bits match + return (index < COLOR_CAPABILITY_COUNT) && ((this->mask_ & CAPABILITY_BITMASKS[index]) != 0); } /// Build a bitmask of modes that match the given capability requirements @@ -230,68 +322,6 @@ class ColorModeMask { // Can be changed to uint32_t if more than 16 color modes are needed in the future. // Note: Due to struct padding, uint16_t and uint32_t result in same LightTraits size (12 bytes). 
color_mode_bitmask_t mask_{0}; - - // Constants for ColorMode count and bit range - static constexpr int COLOR_MODE_COUNT = 10; // UNKNOWN through RGB_COLD_WARM_WHITE - static constexpr int MAX_BIT_INDEX = sizeof(color_mode_bitmask_t) * 8; // Number of bits in bitmask type - - /// Map ColorMode enum values to bit positions (0-9) - static constexpr int mode_to_bit(ColorMode mode) { - // Using switch instead of lookup table to avoid RAM usage on ESP8266 - // The compiler optimizes this efficiently - switch (mode) { - case ColorMode::UNKNOWN: // 0 - return 0; - case ColorMode::ON_OFF: // 1 - return 1; - case ColorMode::BRIGHTNESS: // 3 - return 2; - case ColorMode::WHITE: // 7 - return 3; - case ColorMode::COLOR_TEMPERATURE: // 11 - return 4; - case ColorMode::COLD_WARM_WHITE: // 19 - return 5; - case ColorMode::RGB: // 35 - return 6; - case ColorMode::RGB_WHITE: // 39 - return 7; - case ColorMode::RGB_COLOR_TEMPERATURE: // 47 - return 8; - case ColorMode::RGB_COLD_WARM_WHITE: // 51 - return 9; - default: - return 0; - } - } - - static constexpr ColorMode bit_to_mode(int bit) { - // Using switch instead of lookup table to avoid RAM usage on ESP8266 - switch (bit) { - case 0: - return ColorMode::UNKNOWN; // 0 - case 1: - return ColorMode::ON_OFF; // 1 - case 2: - return ColorMode::BRIGHTNESS; // 3 - case 3: - return ColorMode::WHITE; // 7 - case 4: - return ColorMode::COLOR_TEMPERATURE; // 11 - case 5: - return ColorMode::COLD_WARM_WHITE; // 19 - case 6: - return ColorMode::RGB; // 35 - case 7: - return ColorMode::RGB_WHITE; // 39 - case 8: - return ColorMode::RGB_COLOR_TEMPERATURE; // 47 - case 9: - return ColorMode::RGB_COLD_WARM_WHITE; // 51 - default: - return ColorMode::UNKNOWN; - } - } }; } // namespace light From 8545b5231bf6e6b7927ba219bd0b09810fc5cced Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 13:38:45 -1000 Subject: [PATCH 111/336] preen --- esphome/components/light/color_mode.h | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index c2b1a860ec..7c7239a5af 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -288,15 +288,16 @@ class ColorModeMask { /// Check if any mode in the bitmask has a specific capability /// Used for checking if a light supports a capability (e.g., BRIGHTNESS, RGB) bool has_capability(ColorCapability capability) const { - // Convert capability bit to array index (log2 of the bit value) - uint8_t cap_bit = static_cast(capability); - // Count trailing zeros to get the bit position (0-5) + // Lookup the pre-computed bitmask for this capability and check intersection with our mask + // ColorCapability values: 1, 2, 4, 8, 16, 32 -> array indices: 0, 1, 2, 3, 4, 5 + // We need to convert the power-of-2 value to an index + uint8_t cap_val = static_cast(capability); int index = 0; - while (index < COLOR_CAPABILITY_COUNT && !(cap_bit & (1 << index))) { + while (cap_val > 1) { + cap_val >>= 1; ++index; } - // Look up the pre-computed bitmask and check if any of our set bits match - return (index < COLOR_CAPABILITY_COUNT) && ((this->mask_ & CAPABILITY_BITMASKS[index]) != 0); + return (this->mask_ & CAPABILITY_BITMASKS[index]) != 0; } /// Build a bitmask of modes that match the given capability requirements From a249c9c28290b54e0f143711c97b09ba4f5cdade Mon Sep 17 00:00:00 2001 From: "J. 
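[Example sketch, not part of the patch] A hedged illustration of the compile-time lookup-table approach above, with invented tables and only three capabilities: each capability's mode-bitmask is computed once by a constexpr helper, and the membership test reduces the power-of-two capability value to an array index and ANDs two integers.

    #include <cstdint>
    #include <cstdio>

    // Invented mode values (bit-sets of capabilities) indexed by their position in the mode mask.
    static constexpr uint8_t MODE_VALUES[] = {0x0, 0x1, 0x3, 0x7};  // bit0=BRIGHTNESS, bit1=RGB, bit2=WHITE
    static constexpr int MODE_COUNT = 4;

    // Bitmask of mode positions that carry a given capability bit; evaluated at compile time.
    constexpr uint16_t compute_capability_bitmask(uint8_t cap_bit) {
      uint16_t mask = 0;
      for (int bit = 0; bit < MODE_COUNT; ++bit) {
        if (MODE_VALUES[bit] & cap_bit) mask |= static_cast<uint16_t>(1u << bit);
      }
      return mask;
    }

    // One precomputed entry per capability: index 0 -> capability 1<<0, index 1 -> 1<<1, ...
    static constexpr uint16_t CAPABILITY_BITMASKS[] = {
        compute_capability_bitmask(1 << 0),  // BRIGHTNESS
        compute_capability_bitmask(1 << 1),  // RGB
        compute_capability_bitmask(1 << 2),  // WHITE
    };

    bool has_capability(uint16_t supported_modes_mask, uint8_t capability) {
      // Convert the power-of-two capability value (1, 2, 4, ...) to an array index (0, 1, 2, ...).
      int index = 0;
      while (capability > 1) {
        capability >>= 1;
        ++index;
      }
      return (supported_modes_mask & CAPABILITY_BITMASKS[index]) != 0;
    }

    int main() {
      uint16_t supported = 1u << 2;  // only the mode at position 2 (value 0x3: BRIGHTNESS|RGB)
      std::printf("RGB? %d  WHITE? %d\n", has_capability(supported, 1 << 1) ? 1 : 0,
                  has_capability(supported, 1 << 2) ? 1 : 0);  // prints: RGB? 1  WHITE? 0
      return 0;
    }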
Nick Koston" Date: Sat, 18 Oct 2025 13:46:49 -1000 Subject: [PATCH 112/336] preen --- esphome/components/light/color_mode.h | 24 +++------- esphome/components/light/light_call.cpp | 58 ++++++++++++++++++++----- 2 files changed, 54 insertions(+), 28 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 7c7239a5af..97e61e2a1c 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -213,6 +213,13 @@ class ColorModeMask { constexpr void add(ColorMode mode) { this->mask_ |= (1 << mode_to_bit(mode)); } + /// Add multiple modes at once using initializer list + constexpr void add(std::initializer_list modes) { + for (auto mode : modes) { + this->add(mode); + } + } + constexpr bool contains(ColorMode mode) const { return (this->mask_ & (1 << mode_to_bit(mode))) != 0; } constexpr size_t size() const { @@ -300,23 +307,6 @@ class ColorModeMask { return (this->mask_ & CAPABILITY_BITMASKS[index]) != 0; } - /// Build a bitmask of modes that match the given capability requirements - /// @param require_caps Capabilities that must be present in the mode - /// @return Raw bitmask value - static constexpr color_mode_bitmask_t build_mask_matching(uint8_t require_caps) { - color_mode_bitmask_t mask = 0; - // Check each mode to see if it matches the requirements - // Skip UNKNOWN (bit 0), iterate through actual color modes (bits 1-9) - for (int bit = 1; bit < COLOR_MODE_COUNT; ++bit) { - uint8_t mode_val = static_cast(bit_to_mode(bit)); - // Mode matches if it has all required caps - if ((mode_val & require_caps) == require_caps) { - mask |= (1 << bit); - } - } - return mask; - } - private: // Using uint16_t instead of uint32_t for more efficient iteration (fewer bits to scan). // Currently only 10 ColorMode values exist, so 16 bits is sufficient. 
diff --git a/esphome/components/light/light_call.cpp b/esphome/components/light/light_call.cpp index 036de98639..f209f26005 100644 --- a/esphome/components/light/light_call.cpp +++ b/esphome/components/light/light_call.cpp @@ -458,18 +458,54 @@ color_mode_bitmask_t LightCall::get_suitable_color_modes_mask_() { bool has_rgb = (this->has_color_brightness() && this->color_brightness_ > 0.0f) || (this->has_red() || this->has_green() || this->has_blue()); - // Build required capabilities mask - uint8_t require_caps = static_cast(ColorCapability::ON_OFF | ColorCapability::BRIGHTNESS); - if (has_rgb) - require_caps |= static_cast(ColorCapability::RGB); - if (has_white) - require_caps |= static_cast(ColorCapability::WHITE); - if (has_ct) - require_caps |= static_cast(ColorCapability::COLOR_TEMPERATURE); - if (has_cwww) - require_caps |= static_cast(ColorCapability::COLD_WARM_WHITE); + // Build key from flags: [rgb][cwww][ct][white] +#define KEY(white, ct, cwww, rgb) ((white) << 0 | (ct) << 1 | (cwww) << 2 | (rgb) << 3) - return ColorModeMask::build_mask_matching(require_caps); + uint8_t key = KEY(has_white, has_ct, has_cwww, has_rgb); + + // Build bitmask from suitable ColorModes + ColorModeMask suitable; + + switch (key) { + case KEY(true, false, false, false): // white only + suitable.add({ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, + ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}); + break; + case KEY(false, true, false, false): // ct only + suitable.add({ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, + ColorMode::RGB_COLD_WARM_WHITE}); + break; + case KEY(true, true, false, false): // white + ct + suitable.add({ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}); + break; + case KEY(false, false, true, false): // cwww only + suitable.add({ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}); + break; + case KEY(false, false, false, false): // none + suitable.add({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE, + ColorMode::RGB, ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE}); + break; + case KEY(true, false, false, true): // rgb + white + suitable.add({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}); + break; + case KEY(false, true, false, true): // rgb + ct + case KEY(true, true, false, true): // rgb + white + ct + suitable.add({ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}); + break; + case KEY(false, false, true, true): // rgb + cwww + suitable.add(ColorMode::RGB_COLD_WARM_WHITE); + break; + case KEY(false, false, false, true): // rgb only + suitable.add( + {ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}); + break; + default: + break; // conflicting flags - return empty mask + } + +#undef KEY + + return suitable.get_mask(); } LightCall &LightCall::set_effect(const std::string &effect) { From 2cdfd04204403151f74f47d56ff585e2401bbe05 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 13:53:05 -1000 Subject: [PATCH 113/336] dry --- esphome/components/light/color_mode.h | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 97e61e2a1c..70d940ec54 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -259,11 +259,7 @@ class ColorModeMask { constexpr bool operator!=(const Iterator &other) const { return !(*this == other); } private: - constexpr void advance_to_next_set_bit_() { - while (bit_ < MAX_BIT_INDEX && !(mask_ & (1 << bit_))) { - ++bit_; - } - } + constexpr void advance_to_next_set_bit_() { bit_ = ColorModeMask::find_next_set_bit(mask_, bit_); } color_mode_bitmask_t mask_; int bit_; @@ -275,15 +271,20 @@ class ColorModeMask { /// Get the raw bitmask value for API encoding constexpr color_mode_bitmask_t get_mask() const { return this->mask_; } - /// Find the first set bit in a bitmask and return the corresponding ColorMode - /// Used for optimizing compute_color_mode_() intersection logic - static constexpr ColorMode first_mode_from_mask(color_mode_bitmask_t mask) { - // Find the position of the first set bit (least significant bit) - int bit = 0; + /// Find the next set bit in a bitmask starting from a given position + /// Returns the bit position, or MAX_BIT_INDEX if no more bits are set + static constexpr int find_next_set_bit(color_mode_bitmask_t mask, int start_bit) { + int bit = start_bit; while (bit < MAX_BIT_INDEX && !(mask & (1 << bit))) { ++bit; } - return bit_to_mode(bit); + return bit; + } + + /// Find the first set bit in a bitmask and return the corresponding ColorMode + /// Used for optimizing compute_color_mode_() intersection logic + static constexpr ColorMode first_mode_from_mask(color_mode_bitmask_t mask) { + return bit_to_mode(find_next_set_bit(mask, 0)); } /// Check if a ColorMode is present in a raw bitmask value From f2d01ecd6c7e47896d09dee4534dd0d112e6165c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 13:58:52 -1000 Subject: [PATCH 114/336] dry --- esphome/components/light/color_mode.h | 76 +++++++++------------------ 1 file changed, 26 insertions(+), 50 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 70d940ec54..1583bde4d3 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -111,62 +111,38 @@ using color_mode_bitmask_t = uint16_t; static constexpr int COLOR_MODE_COUNT = 10; // UNKNOWN through RGB_COLD_WARM_WHITE static constexpr int MAX_BIT_INDEX = sizeof(color_mode_bitmask_t) * 8; // Number of bits in bitmask type +// Compile-time array of all ColorMode values in declaration order +// Bit positions (0-9) map directly to enum declaration order +static constexpr ColorMode COLOR_MODES[COLOR_MODE_COUNT] = { + ColorMode::UNKNOWN, // bit 0 + ColorMode::ON_OFF, // bit 1 + ColorMode::BRIGHTNESS, // bit 2 + ColorMode::WHITE, // bit 3 + ColorMode::COLOR_TEMPERATURE, // bit 4 + ColorMode::COLD_WARM_WHITE, // bit 5 + ColorMode::RGB, // bit 6 + ColorMode::RGB_WHITE, // bit 7 + ColorMode::RGB_COLOR_TEMPERATURE, // bit 8 + ColorMode::RGB_COLD_WARM_WHITE, // bit 9 +}; + /// Map ColorMode enum values to bit positions (0-9) +/// Bit positions follow the enum declaration order static constexpr int mode_to_bit(ColorMode mode) { - // Using switch instead of lookup table to avoid RAM usage on ESP8266 - // The compiler optimizes this efficiently - switch (mode) { - case ColorMode::UNKNOWN: // 0 - return 0; - case ColorMode::ON_OFF: // 1 - return 1; - case ColorMode::BRIGHTNESS: // 3 - return 2; - case ColorMode::WHITE: // 7 - return 3; - case ColorMode::COLOR_TEMPERATURE: // 11 - return 4; - case ColorMode::COLD_WARM_WHITE: // 19 - return 5; - case ColorMode::RGB: // 35 - return 6; - case ColorMode::RGB_WHITE: // 39 - return 7; - case ColorMode::RGB_COLOR_TEMPERATURE: // 47 - return 8; - case ColorMode::RGB_COLD_WARM_WHITE: // 51 - return 9; - default: - return 0; + // Linear search through COLOR_MODES array + // Compiler optimizes this to efficient code since array is constexpr + for (int i = 0; i < COLOR_MODE_COUNT; ++i) { + if (COLOR_MODES[i] == mode) + return i; } + return 0; } +/// Map bit positions (0-9) to ColorMode enum values +/// Bit positions follow the enum declaration order static constexpr ColorMode bit_to_mode(int bit) { - // Using switch instead of lookup table to avoid RAM usage on ESP8266 - switch (bit) { - case 0: - return ColorMode::UNKNOWN; // 0 - case 1: - return ColorMode::ON_OFF; // 1 - case 2: - return ColorMode::BRIGHTNESS; // 3 - case 3: - return ColorMode::WHITE; // 7 - case 4: - return ColorMode::COLOR_TEMPERATURE; // 11 - case 5: - return ColorMode::COLD_WARM_WHITE; // 19 - case 6: - return ColorMode::RGB; // 35 - case 7: - return ColorMode::RGB_WHITE; // 39 - case 8: - return ColorMode::RGB_COLOR_TEMPERATURE; // 47 - case 9: - return ColorMode::RGB_COLD_WARM_WHITE; // 51 - default: - return ColorMode::UNKNOWN; - } + // Direct lookup in COLOR_MODES array + return (bit >= 0 && bit < COLOR_MODE_COUNT) ? COLOR_MODES[bit] : ColorMode::UNKNOWN; } /// Helper to compute capability bitmask at compile time From 764428870d7a2520adcc2e93f419c99af01a4a3e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 14:11:23 -1000 Subject: [PATCH 115/336] reduce diff --- esphome/components/light/light_call.cpp | 46 +++++++++---------------- esphome/components/light/light_call.h | 2 +- 2 files changed, 17 insertions(+), 31 deletions(-) diff --git a/esphome/components/light/light_call.cpp b/esphome/components/light/light_call.cpp index f209f26005..5ca2f24d0e 100644 --- a/esphome/components/light/light_call.cpp +++ b/esphome/components/light/light_call.cpp @@ -426,7 +426,8 @@ ColorMode LightCall::compute_color_mode_() { // pre-colormode clients and automations, but also for the MQTT API, where HA doesn't let us know which color mode // was used for some reason. // Compute intersection of suitable and supported modes using bitwise AND - color_mode_bitmask_t intersection = this->get_suitable_color_modes_mask_() & supported_modes.get_mask(); + ColorModeMask suitable = this->get_suitable_color_modes_(); + color_mode_bitmask_t intersection = suitable.get_mask() & supported_modes.get_mask(); // Don't change if the current mode is in the intersection (suitable AND supported) if (ColorModeMask::mask_contains(intersection, current_mode)) { @@ -450,7 +451,7 @@ ColorMode LightCall::compute_color_mode_() { LOG_STR_ARG(color_mode_to_human(color_mode))); return color_mode; } -color_mode_bitmask_t LightCall::get_suitable_color_modes_mask_() { +ColorModeMask LightCall::get_suitable_color_modes_() { bool has_white = this->has_white() && this->white_ > 0.0f; bool has_ct = this->has_color_temperature(); bool has_cwww = @@ -463,49 +464,34 @@ color_mode_bitmask_t LightCall::get_suitable_color_modes_mask_() { uint8_t key = KEY(has_white, has_ct, has_cwww, has_rgb); - // Build bitmask from suitable ColorModes - ColorModeMask suitable; - switch (key) { case KEY(true, false, false, false): // white only - suitable.add({ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, - ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}); - break; + return {ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, + ColorMode::RGB_COLD_WARM_WHITE}; case KEY(false, true, false, false): // ct only - suitable.add({ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, - ColorMode::RGB_COLD_WARM_WHITE}); - break; + return {ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, + ColorMode::RGB_COLD_WARM_WHITE}; case KEY(true, true, false, false): // white + ct - suitable.add({ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}); - break; + return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; case KEY(false, false, true, false): // cwww only - suitable.add({ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}); - break; + return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}; case KEY(false, false, false, false): // none - suitable.add({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE, - ColorMode::RGB, ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE}); - break; + return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE, ColorMode::RGB, + ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE}; case KEY(true, false, false, true): // rgb + white - suitable.add({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, 
ColorMode::RGB_COLD_WARM_WHITE}); - break; + return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; case KEY(false, true, false, true): // rgb + ct case KEY(true, true, false, true): // rgb + white + ct - suitable.add({ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}); - break; + return {ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; case KEY(false, false, true, true): // rgb + cwww - suitable.add(ColorMode::RGB_COLD_WARM_WHITE); - break; + return {ColorMode::RGB_COLD_WARM_WHITE}; case KEY(false, false, false, true): // rgb only - suitable.add( - {ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}); - break; + return {ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; default: - break; // conflicting flags - return empty mask + return {}; // conflicting flags } #undef KEY - - return suitable.get_mask(); } LightCall &LightCall::set_effect(const std::string &effect) { diff --git a/esphome/components/light/light_call.h b/esphome/components/light/light_call.h index 6931b58b9d..f34feadefe 100644 --- a/esphome/components/light/light_call.h +++ b/esphome/components/light/light_call.h @@ -186,7 +186,7 @@ class LightCall { //// Compute the color mode that should be used for this call. ColorMode compute_color_mode_(); /// Get potential color modes bitmask for this light call. - color_mode_bitmask_t get_suitable_color_modes_mask_(); + ColorModeMask get_suitable_color_modes_(); /// Some color modes also can be set using non-native parameters, transform those calls. void transform_parameters_(); From 32eb43fd02ae743fda3e2b5cc251cba663010d1b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 14:14:48 -1000 Subject: [PATCH 116/336] preen --- esphome/components/light/light_call.cpp | 39 +++++++++++++++---------- esphome/components/light/light_call.h | 2 +- 2 files changed, 24 insertions(+), 17 deletions(-) diff --git a/esphome/components/light/light_call.cpp b/esphome/components/light/light_call.cpp index 5ca2f24d0e..89910d851b 100644 --- a/esphome/components/light/light_call.cpp +++ b/esphome/components/light/light_call.cpp @@ -426,8 +426,7 @@ ColorMode LightCall::compute_color_mode_() { // pre-colormode clients and automations, but also for the MQTT API, where HA doesn't let us know which color mode // was used for some reason. 
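 // (Editor's illustration, not part of this hunk: with the bit mapping from color_mode.h, a light
 // supporting {RGB, RGB_WHITE} has supported mask 0b0011000000; a call that sets none of the
 // white/ct/cwww/rgb flags yields the broad suitable mask 0b1111111000, and the AND below leaves
 // 0b0011000000, so only modes that are both suitable and supported remain as candidates.)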
// Compute intersection of suitable and supported modes using bitwise AND - ColorModeMask suitable = this->get_suitable_color_modes_(); - color_mode_bitmask_t intersection = suitable.get_mask() & supported_modes.get_mask(); + color_mode_bitmask_t intersection = this->get_suitable_color_modes_() & supported_modes.get_mask(); // Don't change if the current mode is in the intersection (suitable AND supported) if (ColorModeMask::mask_contains(intersection, current_mode)) { @@ -451,7 +450,7 @@ ColorMode LightCall::compute_color_mode_() { LOG_STR_ARG(color_mode_to_human(color_mode))); return color_mode; } -ColorModeMask LightCall::get_suitable_color_modes_() { +color_mode_bitmask_t LightCall::get_suitable_color_modes_() { bool has_white = this->has_white() && this->white_ > 0.0f; bool has_ct = this->has_color_temperature(); bool has_cwww = @@ -466,29 +465,37 @@ ColorModeMask LightCall::get_suitable_color_modes_() { switch (key) { case KEY(true, false, false, false): // white only - return {ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, - ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, + ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); case KEY(false, true, false, false): // ct only - return {ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, - ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, + ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); case KEY(true, true, false, false): // white + ct - return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask( + {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); case KEY(false, false, true, false): // cwww only - return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}).get_mask(); case KEY(false, false, false, false): // none - return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE, ColorMode::RGB, - ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE, + ColorMode::RGB, ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE}) + .get_mask(); case KEY(true, false, false, true): // rgb + white - return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); case KEY(false, true, false, true): // rgb + ct case KEY(true, true, false, true): // rgb + white + ct - return {ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}).get_mask(); case KEY(false, false, true, true): // rgb + cwww - return {ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB_COLD_WARM_WHITE}).get_mask(); case KEY(false, false, false, true): // rgb only - return {ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; + return 
ColorModeMask({ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, + ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); default: - return {}; // conflicting flags + return 0; // conflicting flags } #undef KEY diff --git a/esphome/components/light/light_call.h b/esphome/components/light/light_call.h index f34feadefe..e25b26731f 100644 --- a/esphome/components/light/light_call.h +++ b/esphome/components/light/light_call.h @@ -186,7 +186,7 @@ class LightCall { //// Compute the color mode that should be used for this call. ColorMode compute_color_mode_(); /// Get potential color modes bitmask for this light call. - ColorModeMask get_suitable_color_modes_(); + color_mode_bitmask_t get_suitable_color_modes_(); /// Some color modes also can be set using non-native parameters, transform those calls. void transform_parameters_(); From 1381db37adc2b6ed5c984b64d51a0e21b1e091ce Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 14:18:17 -1000 Subject: [PATCH 117/336] preen --- esphome/components/light/light_call.cpp | 4 ++-- esphome/components/light/light_call.h | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/esphome/components/light/light_call.cpp b/esphome/components/light/light_call.cpp index 89910d851b..af193e1f11 100644 --- a/esphome/components/light/light_call.cpp +++ b/esphome/components/light/light_call.cpp @@ -426,7 +426,7 @@ ColorMode LightCall::compute_color_mode_() { // pre-colormode clients and automations, but also for the MQTT API, where HA doesn't let us know which color mode // was used for some reason. // Compute intersection of suitable and supported modes using bitwise AND - color_mode_bitmask_t intersection = this->get_suitable_color_modes_() & supported_modes.get_mask(); + color_mode_bitmask_t intersection = this->get_suitable_color_modes_mask_() & supported_modes.get_mask(); // Don't change if the current mode is in the intersection (suitable AND supported) if (ColorModeMask::mask_contains(intersection, current_mode)) { @@ -450,7 +450,7 @@ ColorMode LightCall::compute_color_mode_() { LOG_STR_ARG(color_mode_to_human(color_mode))); return color_mode; } -color_mode_bitmask_t LightCall::get_suitable_color_modes_() { +color_mode_bitmask_t LightCall::get_suitable_color_modes_mask_() { bool has_white = this->has_white() && this->white_ > 0.0f; bool has_ct = this->has_color_temperature(); bool has_cwww = diff --git a/esphome/components/light/light_call.h b/esphome/components/light/light_call.h index e25b26731f..6931b58b9d 100644 --- a/esphome/components/light/light_call.h +++ b/esphome/components/light/light_call.h @@ -186,7 +186,7 @@ class LightCall { //// Compute the color mode that should be used for this call. ColorMode compute_color_mode_(); /// Get potential color modes bitmask for this light call. - color_mode_bitmask_t get_suitable_color_modes_(); + color_mode_bitmask_t get_suitable_color_modes_mask_(); /// Some color modes also can be set using non-native parameters, transform those calls. void transform_parameters_(); From 437dd503ca0f4bd346407a9c6995b20e46cfd2ae Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 14:21:52 -1000 Subject: [PATCH 118/336] more cover --- tests/integration/test_light_calls.py | 57 ++++++++++++++++----------- 1 file changed, 35 insertions(+), 22 deletions(-) diff --git a/tests/integration/test_light_calls.py b/tests/integration/test_light_calls.py index 152896ba88..0eaf5af91b 100644 --- a/tests/integration/test_light_calls.py +++ b/tests/integration/test_light_calls.py @@ -36,37 +36,50 @@ async def test_light_calls( # Get the light entities entities = await client.list_entities_services() lights = [e for e in entities[0] if e.object_id.startswith("test_")] - assert len(lights) >= 2 # Should have RGBCW and RGB lights + assert len(lights) >= 3 # Should have RGBCW, RGB, and Binary lights rgbcw_light = next(light for light in lights if "RGBCW" in light.name) rgb_light = next(light for light in lights if "RGB Light" in light.name) + binary_light = next(light for light in lights if "Binary" in light.name) # Test color mode encoding: Verify supported_color_modes contains actual ColorMode enum values - # not bit positions. This is critical - the bug was encoding bit position 6 instead of - # ColorMode.RGB (value 35). + # not bit positions. This is critical - the iterator must convert bit positions to actual + # ColorMode enum values for API encoding. - # RGB light should support RGB mode (ColorMode.RGB = 35) - assert ColorMode.RGB in rgb_light.supported_color_modes, ( - f"RGB light missing RGB color mode. Got: {rgb_light.supported_color_modes}" - ) - # Verify it's the actual enum value, not a bit position - assert 35 in [mode.value for mode in rgb_light.supported_color_modes], ( - f"RGB light has wrong color mode values. Expected 35 (RGB), got: " - f"{[mode.value for mode in rgb_light.supported_color_modes]}" - ) - - # RGBCW light should support multiple modes including RGB_COLD_WARM_WHITE (value 51) + # RGBCW light (rgbww platform) should support RGB_COLD_WARM_WHITE mode assert ColorMode.RGB_COLD_WARM_WHITE in rgbcw_light.supported_color_modes, ( f"RGBCW light missing RGB_COLD_WARM_WHITE mode. Got: {rgbcw_light.supported_color_modes}" ) - # Verify actual enum values - expected_rgbcw_modes = { - ColorMode.RGB_COLD_WARM_WHITE, # 51 - # May have other modes too - } - assert expected_rgbcw_modes.issubset(set(rgbcw_light.supported_color_modes)), ( - f"RGBCW light missing expected color modes. Got: " - f"{[f'{mode.name}={mode.value}' for mode in rgbcw_light.supported_color_modes]}" + # Verify it's the actual enum value, not bit position + assert ColorMode.RGB_COLD_WARM_WHITE.value in [ + mode.value for mode in rgbcw_light.supported_color_modes + ], ( + f"RGBCW light has wrong color mode values. Expected {ColorMode.RGB_COLD_WARM_WHITE.value} " + f"(RGB_COLD_WARM_WHITE), got: {[mode.value for mode in rgbcw_light.supported_color_modes]}" + ) + + # RGB light should support RGB mode + assert ColorMode.RGB in rgb_light.supported_color_modes, ( + f"RGB light missing RGB color mode. Got: {rgb_light.supported_color_modes}" + ) + # Verify it's the actual enum value, not bit position + assert ColorMode.RGB.value in [ + mode.value for mode in rgb_light.supported_color_modes + ], ( + f"RGB light has wrong color mode values. Expected {ColorMode.RGB.value} (RGB), got: " + f"{[mode.value for mode in rgb_light.supported_color_modes]}" + ) + + # Binary light (on/off only) should support ON_OFF mode + assert ColorMode.ON_OFF in binary_light.supported_color_modes, ( + f"Binary light missing ON_OFF color mode. 
Got: {binary_light.supported_color_modes}" + ) + # Verify it's the actual enum value, not bit position + assert ColorMode.ON_OFF.value in [ + mode.value for mode in binary_light.supported_color_modes + ], ( + f"Binary light has wrong color mode values. Expected {ColorMode.ON_OFF.value} (ON_OFF), got: " + f"{[mode.value for mode in binary_light.supported_color_modes]}" ) async def wait_for_state_change(key: int, timeout: float = 1.0) -> Any: From f7d52a342bd29049b5b0b355a6536aa70bc6eb18 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 14:41:59 -1000 Subject: [PATCH 119/336] review comments --- esphome/components/light/color_mode.h | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 1583bde4d3..a26f917167 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -199,12 +199,13 @@ class ColorModeMask { constexpr bool contains(ColorMode mode) const { return (this->mask_ & (1 << mode_to_bit(mode))) != 0; } constexpr size_t size() const { - // Count set bits + // Count set bits using Brian Kernighan's algorithm + // More efficient for sparse bitmasks (typical case: 2-4 modes out of 10) uint16_t n = this->mask_; size_t count = 0; while (n) { - count += n & 1; - n >>= 1; + n &= n - 1; // Clear the least significant set bit + count++; } return count; } @@ -276,11 +277,17 @@ class ColorModeMask { // ColorCapability values: 1, 2, 4, 8, 16, 32 -> array indices: 0, 1, 2, 3, 4, 5 // We need to convert the power-of-2 value to an index uint8_t cap_val = static_cast(capability); +#if defined(__GNUC__) || defined(__clang__) + // Use compiler intrinsic for efficient bit position lookup (O(1) vs O(log n)) + int index = __builtin_ctz(cap_val); +#else + // Fallback for compilers without __builtin_ctz int index = 0; while (cap_val > 1) { cap_val >>= 1; ++index; } +#endif return (this->mask_ & CAPABILITY_BITMASKS[index]) != 0; } From b378038253f7a015cadf36fc4407ecde3f9774ff Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 20:05:22 -1000 Subject: [PATCH 120/336] [esp32_ble_client] Remove duplicate MAC address extraction in set_address() --- esphome/components/esp32_ble_client/ble_client_base.h | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/esphome/components/esp32_ble_client/ble_client_base.h b/esphome/components/esp32_ble_client/ble_client_base.h index f2edd6c2b3..7f0ae3b83e 100644 --- a/esphome/components/esp32_ble_client/ble_client_base.h +++ b/esphome/components/esp32_ble_client/ble_client_base.h @@ -61,12 +61,7 @@ class BLEClientBase : public espbt::ESPBTClient, public Component { this->address_str_ = ""; } else { char buf[18]; - uint8_t mac[6] = { - (uint8_t) ((this->address_ >> 40) & 0xff), (uint8_t) ((this->address_ >> 32) & 0xff), - (uint8_t) ((this->address_ >> 24) & 0xff), (uint8_t) ((this->address_ >> 16) & 0xff), - (uint8_t) ((this->address_ >> 8) & 0xff), (uint8_t) ((this->address_ >> 0) & 0xff), - }; - format_mac_addr_upper(mac, buf); + format_mac_addr_upper(this->remote_bda_, buf); this->address_str_ = buf; } } From 071bdfa67f7a33a7bd0795568c3763abf8065389 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 20:20:51 -1000 Subject: [PATCH 121/336] [bluetooth_proxy] Merge duplicate loops in get_connection_() --- .../components/bluetooth_proxy/bluetooth_proxy.cpp | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/esphome/components/bluetooth_proxy/bluetooth_proxy.cpp b/esphome/components/bluetooth_proxy/bluetooth_proxy.cpp index cd7261d5e5..34e0aa93a3 100644 --- a/esphome/components/bluetooth_proxy/bluetooth_proxy.cpp +++ b/esphome/components/bluetooth_proxy/bluetooth_proxy.cpp @@ -155,16 +155,12 @@ esp32_ble_tracker::AdvertisementParserType BluetoothProxy::get_advertisement_par BluetoothConnection *BluetoothProxy::get_connection_(uint64_t address, bool reserve) { for (uint8_t i = 0; i < this->connection_count_; i++) { auto *connection = this->connections_[i]; - if (connection->get_address() == address) + uint64_t conn_addr = connection->get_address(); + + if (conn_addr == address) return connection; - } - if (!reserve) - return nullptr; - - for (uint8_t i = 0; i < this->connection_count_; i++) { - auto *connection = this->connections_[i]; - if (connection->get_address() == 0) { + if (reserve && conn_addr == 0) { connection->send_service_ = INIT_SENDING_SERVICES; connection->set_address(address); // All connections must start at INIT @@ -175,7 +171,6 @@ BluetoothConnection *BluetoothProxy::get_connection_(uint64_t address, bool rese return connection; } } - return nullptr; } From 6d1288c806a176e39d7e77a1cd6f245274db623a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 20:44:36 -1000 Subject: [PATCH 122/336] [mdns] Use FixedVector for TXT records to reduce ESP32 flash usage --- esphome/components/mdns/mdns_esp32.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/esphome/components/mdns/mdns_esp32.cpp b/esphome/components/mdns/mdns_esp32.cpp index f2cb2d3ef5..170a05a90e 100644 --- a/esphome/components/mdns/mdns_esp32.cpp +++ b/esphome/components/mdns/mdns_esp32.cpp @@ -31,7 +31,8 @@ void MDNSComponent::setup() { mdns_instance_name_set(this->hostname_.c_str()); for (const auto &service : services) { - std::vector txt_records; + FixedVector txt_records; + txt_records.init(service.txt_records.size()); for (const auto &record : service.txt_records) { mdns_txt_item_t it{}; // key and value are either compile-time string literals in flash or pointers to dynamic_txt_values_ @@ -42,7 +43,7 @@ void MDNSComponent::setup() { } uint16_t port = const_cast &>(service.port).value(); err = mdns_service_add(nullptr, MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto), port, - txt_records.data(), txt_records.size()); + txt_records.begin(), txt_records.size()); if (err != ESP_OK) { ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err)); From 53d7b4f4333675b71d25d16aa724460de41e424b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 21:00:45 -1000 Subject: [PATCH 123/336] [wifi] Replace std::vector with std::unique_ptr for WiFi scan buffer --- esphome/components/wifi/wifi_component_esp_idf.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/esphome/components/wifi/wifi_component_esp_idf.cpp b/esphome/components/wifi/wifi_component_esp_idf.cpp index 951f5803a6..ce1cc961d0 100644 --- a/esphome/components/wifi/wifi_component_esp_idf.cpp +++ b/esphome/components/wifi/wifi_component_esp_idf.cpp @@ -776,13 +776,12 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) { } uint16_t number = it.number; - std::vector records(number); - err = esp_wifi_scan_get_ap_records(&number, records.data()); + auto records = std::make_unique(number); + err = esp_wifi_scan_get_ap_records(&number, records.get()); if (err != ESP_OK) { ESP_LOGW(TAG, "esp_wifi_scan_get_ap_records failed: %s", esp_err_to_name(err)); return; } - records.resize(number); scan_result_.init(number); for (int i = 0; i < number; i++) { From f036e894c8eb3c1e3cf48f273a35132454c077e2 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 21:18:11 -1000 Subject: [PATCH 124/336] adjust --- esphome/components/mdns/mdns_esp32.cpp | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/esphome/components/mdns/mdns_esp32.cpp b/esphome/components/mdns/mdns_esp32.cpp index 170a05a90e..c02bfcbadb 100644 --- a/esphome/components/mdns/mdns_esp32.cpp +++ b/esphome/components/mdns/mdns_esp32.cpp @@ -31,19 +31,17 @@ void MDNSComponent::setup() { mdns_instance_name_set(this->hostname_.c_str()); for (const auto &service : services) { - FixedVector txt_records; - txt_records.init(service.txt_records.size()); - for (const auto &record : service.txt_records) { - mdns_txt_item_t it{}; + auto txt_records = std::make_unique(service.txt_records.size()); + for (size_t i = 0; i < service.txt_records.size(); i++) { + const auto &record = service.txt_records[i]; // key and value are either compile-time string literals in flash or pointers to dynamic_txt_values_ // Both remain valid for the lifetime of this function, and ESP-IDF makes internal copies - it.key = MDNS_STR_ARG(record.key); - it.value = MDNS_STR_ARG(record.value); - txt_records.push_back(it); + txt_records[i].key = MDNS_STR_ARG(record.key); + txt_records[i].value = MDNS_STR_ARG(record.value); } uint16_t port = const_cast &>(service.port).value(); err = mdns_service_add(nullptr, MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto), port, - txt_records.begin(), txt_records.size()); + txt_records.get(), service.txt_records.size()); if (err != ESP_OK) { ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err)); From f387d9ec50765b75b9037f6c6176d4a180a68baa Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 21:33:38 -1000 Subject: [PATCH 125/336] unique ptr --- esphome/components/script/script.h | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/esphome/components/script/script.h b/esphome/components/script/script.h index 3a97a26985..26192b8997 100644 --- a/esphome/components/script/script.h +++ b/esphome/components/script/script.h @@ -140,8 +140,10 @@ template class QueueingScript : public Script, public Com void stop() override { // Clear all queued items to free memory immediately - for (int i = 0; i < this->max_runs_ - 1; i++) { - this->var_queue_[i].reset(); + if (this->var_queue_) { + for (int i = 0; i < this->max_runs_ - 1; i++) { + this->var_queue_[i].reset(); + } } this->num_queued_ = 0; this->queue_front_ = 0; @@ -164,13 +166,10 @@ template class QueueingScript : public Script, public Com // Lazy init queue on first use - avoids setup() ordering issues and saves memory // if script is never executed during this boot cycle inline void lazy_init_queue_() { - if (this->var_queue_.capacity() == 0) { - // Allocate max_runs_ - 1 slots for queued items (running item is separate) - this->var_queue_.init(this->max_runs_ - 1); - // Initialize all unique_ptr slots to nullptr - for (int i = 0; i < this->max_runs_ - 1; i++) { - this->var_queue_.push_back(nullptr); - } + if (!this->var_queue_) { + // Allocate array of max_runs_ - 1 slots for queued items (running item is separate) + // unique_ptr array is zero-initialized, so all slots start as nullptr + this->var_queue_ = std::make_unique>[]>(this->max_runs_ - 1); } } @@ -181,7 +180,7 @@ template class QueueingScript : public Script, public Com int num_queued_ = 0; // Number of queued instances (not including currently running) int max_runs_ = 0; // Maximum total instances (running + queued) size_t queue_front_ = 0; // Ring buffer read position (next item to execute) - FixedVector>> var_queue_; // Ring buffer of queued parameters + std::unique_ptr>[]> var_queue_; // Ring buffer of queued parameters }; /** A script type that executes new instances in parallel. From 0e513b41e4390ae5d3fc50a74896d080ec32cfa6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 21:45:02 -1000 Subject: [PATCH 126/336] preen --- esphome/components/script/script.h | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/esphome/components/script/script.h b/esphome/components/script/script.h index 26192b8997..86edd3b3e4 100644 --- a/esphome/components/script/script.h +++ b/esphome/components/script/script.h @@ -111,8 +111,6 @@ template class RestartScript : public Script { template class QueueingScript : public Script, public Component { public: void execute(Ts... 
x) override { - this->lazy_init_queue_(); - if (this->is_action_running() || this->num_queued_ > 0) { // num_queued_ is the number of *queued* instances (waiting, not including currently running) // max_runs_ is the maximum *total* instances (running + queued) @@ -123,12 +121,15 @@ template class QueueingScript : public Script, public Com return; } + // Initialize queue on first queued item (after capacity check) + this->lazy_init_queue_(); + this->esp_logd_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' queueing new instance (mode: queued)"), LOG_STR_ARG(this->name_)); // Ring buffer: write to (queue_front_ + num_queued_) % (max_runs_ - 1) size_t write_pos = (this->queue_front_ + this->num_queued_) % (this->max_runs_ - 1); - // Use reset() to replace the unique_ptr - this->var_queue_[write_pos].reset(new std::tuple(std::make_tuple(x...))); + // Use std::make_unique to replace the unique_ptr + this->var_queue_[write_pos] = std::make_unique>(x...); this->num_queued_++; return; } @@ -144,6 +145,7 @@ template class QueueingScript : public Script, public Com for (int i = 0; i < this->max_runs_ - 1; i++) { this->var_queue_[i].reset(); } + this->var_queue_.reset(); } this->num_queued_ = 0; this->queue_front_ = 0; From f5e5f4ef06c335008a79d81a45783c69e6e77516 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 21:47:03 -1000 Subject: [PATCH 127/336] preen --- esphome/core/helpers.h | 1 - 1 file changed, 1 deletion(-) diff --git a/esphome/core/helpers.h b/esphome/core/helpers.h index dd67836653..326718e974 100644 --- a/esphome/core/helpers.h +++ b/esphome/core/helpers.h @@ -298,7 +298,6 @@ template class FixedVector { const T &back() const { return data_[size_ - 1]; } size_t size() const { return size_; } - size_t capacity() const { return capacity_; } bool empty() const { return size_ == 0; } /// Access element without bounds checking (matches std::vector behavior) From 7bb222a574dedea8b70522e8c9bbc904b0c52aa0 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 21:51:51 -1000 Subject: [PATCH 128/336] Update esphome/components/script/script.h Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- esphome/components/script/script.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/script/script.h b/esphome/components/script/script.h index 86edd3b3e4..55967ead06 100644 --- a/esphome/components/script/script.h +++ b/esphome/components/script/script.h @@ -116,7 +116,7 @@ template class QueueingScript : public Script, public Com // max_runs_ is the maximum *total* instances (running + queued) // So we reject when num_queued_ + 1 >= max_runs_ (queued + running >= max) if (this->num_queued_ + 1 >= this->max_runs_) { - this->esp_logw_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' maximum number of queued runs exceeded!"), + this->esp_logw_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' maximum total instances (running + queued) exceeded!"), LOG_STR_ARG(this->name_)); return; } From e0477e3bb19149f0e7ec5c393c1a913b78ffdb2a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci-lite[bot]" <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com> Date: Sun, 19 Oct 2025 07:53:21 +0000 Subject: [PATCH 129/336] [pre-commit.ci lite] apply automatic fixes --- esphome/components/script/script.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/esphome/components/script/script.h b/esphome/components/script/script.h index 55967ead06..bb26f5b9ef 100644 --- a/esphome/components/script/script.h +++ b/esphome/components/script/script.h @@ -116,7 +116,8 @@ template class QueueingScript : public Script, public Com // max_runs_ is the maximum *total* instances (running + queued) // So we reject when num_queued_ + 1 >= max_runs_ (queued + running >= max) if (this->num_queued_ + 1 >= this->max_runs_) { - this->esp_logw_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' maximum total instances (running + queued) exceeded!"), + this->esp_logw_(__LINE__, + ESPHOME_LOG_FORMAT("Script '%s' maximum total instances (running + queued) exceeded!"), LOG_STR_ARG(this->name_)); return; } From 498dece3828281cc0658d95c73ea5db63812831c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 18 Oct 2025 21:54:05 -1000 Subject: [PATCH 130/336] suggestions --- esphome/components/script/script.h | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/esphome/components/script/script.h b/esphome/components/script/script.h index 55967ead06..a69049840f 100644 --- a/esphome/components/script/script.h +++ b/esphome/components/script/script.h @@ -116,7 +116,8 @@ template class QueueingScript : public Script, public Com // max_runs_ is the maximum *total* instances (running + queued) // So we reject when num_queued_ + 1 >= max_runs_ (queued + running >= max) if (this->num_queued_ + 1 >= this->max_runs_) { - this->esp_logw_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' maximum total instances (running + queued) exceeded!"), + this->esp_logw_(__LINE__, + ESPHOME_LOG_FORMAT("Script '%s' maximum total instances (running + queued) exceeded!"), LOG_STR_ARG(this->name_)); return; } @@ -126,8 +127,9 @@ template class QueueingScript : public Script, public Com this->esp_logd_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' queueing new instance (mode: queued)"), LOG_STR_ARG(this->name_)); - // Ring buffer: write to (queue_front_ + num_queued_) % (max_runs_ - 1) - size_t write_pos = (this->queue_front_ + this->num_queued_) % (this->max_runs_ - 1); + // Ring buffer: write to (queue_front_ + num_queued_) % queue_capacity + const size_t queue_capacity = static_cast(this->max_runs_ - 1); + size_t write_pos = (this->queue_front_ + this->num_queued_) % queue_capacity; // Use std::make_unique to replace the unique_ptr this->var_queue_[write_pos] = std::make_unique>(x...); this->num_queued_++; @@ -142,7 +144,8 @@ template class QueueingScript : public Script, public Com void stop() override { // Clear all queued items to free memory immediately if (this->var_queue_) { - for (int i = 0; i < this->max_runs_ - 1; i++) { + const size_t queue_capacity = static_cast(this->max_runs_ - 1); + for (size_t i = 0; i < queue_capacity; i++) { this->var_queue_[i].reset(); } this->var_queue_.reset(); @@ -156,8 +159,9 @@ template class QueueingScript : public Script, public Com if (this->num_queued_ != 0 && !this->is_action_running()) { // Dequeue: decrement count, move tuple out (frees slot), advance read position this->num_queued_--; + const size_t queue_capacity = static_cast(this->max_runs_ - 1); auto tuple_ptr = std::move(this->var_queue_[this->queue_front_]); - this->queue_front_ = (this->queue_front_ + 1) % (this->max_runs_ - 1); + this->queue_front_ = (this->queue_front_ + 1) % queue_capacity; this->trigger_tuple_(*tuple_ptr, typename gens::type()); } } From 32a1e4584289df94b2f45d3c11601af0cfaf0801 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 21:54:20 -1000 Subject: [PATCH 131/336] suggestions --- tests/integration/test_script_queued.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/test_script_queued.py b/tests/integration/test_script_queued.py index 9f4bce6f31..cd6dbba7e2 100644 --- a/tests/integration/test_script_queued.py +++ b/tests/integration/test_script_queued.py @@ -32,7 +32,7 @@ async def test_script_queued( queue_start = re.compile(r"Queue test: START item (\d+)") queue_end = re.compile(r"Queue test: END item (\d+)") queue_reject = re.compile( - r"Script 'queue_depth_script' maximum number of queued runs exceeded!" + r"Script 'queue_depth_script' maximum total instances \(running \+ queued\) exceeded!" 
) # Patterns for Test 2: Ring buffer From acdecafeef36067a7033f9f98058699177b8b604 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 21:55:25 -1000 Subject: [PATCH 132/336] suggestions --- tests/integration/test_script_queued.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/test_script_queued.py b/tests/integration/test_script_queued.py index cd6dbba7e2..9afaaf3286 100644 --- a/tests/integration/test_script_queued.py +++ b/tests/integration/test_script_queued.py @@ -47,7 +47,7 @@ async def test_script_queued( reject_start = re.compile(r"Rejection test: START (\d+)") reject_end = re.compile(r"Rejection test: END (\d+)") reject_reject = re.compile( - r"Script 'rejection_script' maximum number of queued runs exceeded!" + r"Script 'rejection_script' maximum total instances \(running \+ queued\) exceeded!" ) # Patterns for Test 5: No params From 70479dec0d1ffd33f8c510939519e0329cbb3ba3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 18 Oct 2025 21:57:19 -1000 Subject: [PATCH 133/336] suggestions --- esphome/components/script/script.h | 3 +-- tests/integration/test_script_queued.py | 8 ++------ 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/esphome/components/script/script.h b/esphome/components/script/script.h index a69049840f..84e1e95bf4 100644 --- a/esphome/components/script/script.h +++ b/esphome/components/script/script.h @@ -116,8 +116,7 @@ template class QueueingScript : public Script, public Com // max_runs_ is the maximum *total* instances (running + queued) // So we reject when num_queued_ + 1 >= max_runs_ (queued + running >= max) if (this->num_queued_ + 1 >= this->max_runs_) { - this->esp_logw_(__LINE__, - ESPHOME_LOG_FORMAT("Script '%s' maximum total instances (running + queued) exceeded!"), + this->esp_logw_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' max instances (running + queued) reached!"), LOG_STR_ARG(this->name_)); return; } diff --git a/tests/integration/test_script_queued.py b/tests/integration/test_script_queued.py index 9afaaf3286..ce1c25b649 100644 --- a/tests/integration/test_script_queued.py +++ b/tests/integration/test_script_queued.py @@ -31,9 +31,7 @@ async def test_script_queued( # Patterns for Test 1: Queue depth queue_start = re.compile(r"Queue test: START item (\d+)") queue_end = re.compile(r"Queue test: END item (\d+)") - queue_reject = re.compile( - r"Script 'queue_depth_script' maximum total instances \(running \+ queued\) exceeded!" - ) + queue_reject = re.compile(r"Script 'queue_depth_script' max instances") # Patterns for Test 2: Ring buffer ring_start = re.compile(r"Ring buffer: START '([A-Z])'") @@ -46,9 +44,7 @@ async def test_script_queued( # Patterns for Test 4: Rejection reject_start = re.compile(r"Rejection test: START (\d+)") reject_end = re.compile(r"Rejection test: END (\d+)") - reject_reject = re.compile( - r"Script 'rejection_script' maximum total instances \(running \+ queued\) exceeded!" 
- ) + reject_reject = re.compile(r"Script 'rejection_script' max instances") # Patterns for Test 5: No params no_params_end = re.compile(r"No params: END") From f3cdbd0a05aa3e2969c596b92522c1d1095f4e76 Mon Sep 17 00:00:00 2001 From: tomaszduda23 Date: Sun, 19 Oct 2025 19:39:48 +0200 Subject: [PATCH 134/336] [nrf52] fix task names in logs (#11367) --- esphome/components/logger/logger.h | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/esphome/components/logger/logger.h b/esphome/components/logger/logger.h index 2099520049..dc8e06e0c9 100644 --- a/esphome/components/logger/logger.h +++ b/esphome/components/logger/logger.h @@ -68,6 +68,9 @@ static constexpr char LOG_LEVEL_LETTER_CHARS[] = { // Maximum header size: 35 bytes fixed + 32 bytes tag + 16 bytes thread name = 83 bytes (45 byte safety margin) static constexpr uint16_t MAX_HEADER_SIZE = 128; +// "0x" + 2 hex digits per byte + '\0' +static constexpr size_t MAX_POINTER_REPRESENTATION = 2 + sizeof(void *) * 2 + 1; + #if defined(USE_ESP32) || defined(USE_ESP8266) || defined(USE_RP2040) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR) /** Enum for logging UART selection * @@ -177,8 +180,11 @@ class Logger : public Component { inline void HOT format_log_to_buffer_with_terminator_(uint8_t level, const char *tag, int line, const char *format, va_list args, char *buffer, uint16_t *buffer_at, uint16_t buffer_size) { -#if defined(USE_ESP32) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR) +#if defined(USE_ESP32) || defined(USE_LIBRETINY) this->write_header_to_buffer_(level, tag, line, this->get_thread_name_(), buffer, buffer_at, buffer_size); +#elif defined(USE_ZEPHYR) + char buff[MAX_POINTER_REPRESENTATION]; + this->write_header_to_buffer_(level, tag, line, this->get_thread_name_(buff), buffer, buffer_at, buffer_size); #else this->write_header_to_buffer_(level, tag, line, nullptr, buffer, buffer_at, buffer_size); #endif @@ -277,7 +283,11 @@ class Logger : public Component { #endif #if defined(USE_ESP32) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR) - const char *HOT get_thread_name_() { + const char *HOT get_thread_name_( +#ifdef USE_ZEPHYR + char *buff +#endif + ) { #ifdef USE_ZEPHYR k_tid_t current_task = k_current_get(); #else @@ -291,7 +301,13 @@ class Logger : public Component { #elif defined(USE_LIBRETINY) return pcTaskGetTaskName(current_task); #elif defined(USE_ZEPHYR) - return k_thread_name_get(current_task); + const char *name = k_thread_name_get(current_task); + if (name) { + // zephyr print task names only if debug component is present + return name; + } + std::snprintf(buff, MAX_POINTER_REPRESENTATION, "%p", current_task); + return buff; #endif } } From 5e1019a6fa139a6839181153cdb9c3e293f94c56 Mon Sep 17 00:00:00 2001 From: tomaszduda23 Date: Sun, 19 Oct 2025 19:41:19 +0200 Subject: [PATCH 135/336] [nrf52, ble_nus] add logging over BLE (#9846) --- CODEOWNERS | 1 + esphome/components/ble_nus/__init__.py | 29 ++++ esphome/components/ble_nus/ble_nus.cpp | 157 ++++++++++++++++++ esphome/components/ble_nus/ble_nus.h | 37 +++++ .../components/zephyr_ble_server/__init__.py | 34 ++++ .../zephyr_ble_server/ble_server.cpp | 100 +++++++++++ .../components/zephyr_ble_server/ble_server.h | 19 +++ script/helpers_zephyr.py | 1 + .../ble_nus/test.nrf52-adafruit.yaml | 2 + .../components/ble_nus/test.nrf52-mcumgr.yaml | 2 + 10 files changed, 382 insertions(+) create mode 100644 esphome/components/ble_nus/__init__.py create mode 100644 esphome/components/ble_nus/ble_nus.cpp create mode 100644 
esphome/components/ble_nus/ble_nus.h create mode 100644 esphome/components/zephyr_ble_server/__init__.py create mode 100644 esphome/components/zephyr_ble_server/ble_server.cpp create mode 100644 esphome/components/zephyr_ble_server/ble_server.h create mode 100644 tests/components/ble_nus/test.nrf52-adafruit.yaml create mode 100644 tests/components/ble_nus/test.nrf52-mcumgr.yaml diff --git a/CODEOWNERS b/CODEOWNERS index b5cefa1e0c..09bd15137a 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -70,6 +70,7 @@ esphome/components/bl0939/* @ziceva esphome/components/bl0940/* @dan-s-github @tobias- esphome/components/bl0942/* @dbuezas @dwmw2 esphome/components/ble_client/* @buxtronix @clydebarrow +esphome/components/ble_nus/* @tomaszduda23 esphome/components/bluetooth_proxy/* @bdraco @jesserockz esphome/components/bme280_base/* @esphome/core esphome/components/bme280_spi/* @apbodrov diff --git a/esphome/components/ble_nus/__init__.py b/esphome/components/ble_nus/__init__.py new file mode 100644 index 0000000000..9570005902 --- /dev/null +++ b/esphome/components/ble_nus/__init__.py @@ -0,0 +1,29 @@ +import esphome.codegen as cg +from esphome.components.zephyr import zephyr_add_prj_conf +import esphome.config_validation as cv +from esphome.const import CONF_ID, CONF_LOGS, CONF_TYPE + +AUTO_LOAD = ["zephyr_ble_server"] +CODEOWNERS = ["@tomaszduda23"] + +ble_nus_ns = cg.esphome_ns.namespace("ble_nus") +BLENUS = ble_nus_ns.class_("BLENUS", cg.Component) + +CONFIG_SCHEMA = cv.All( + cv.Schema( + { + cv.GenerateID(): cv.declare_id(BLENUS), + cv.Optional(CONF_TYPE, default=CONF_LOGS): cv.one_of( + *[CONF_LOGS], lower=True + ), + } + ).extend(cv.COMPONENT_SCHEMA), + cv.only_with_framework("zephyr"), +) + + +async def to_code(config): + var = cg.new_Pvariable(config[CONF_ID]) + zephyr_add_prj_conf("BT_NUS", True) + cg.add(var.set_expose_log(config[CONF_TYPE] == CONF_LOGS)) + await cg.register_component(var, config) diff --git a/esphome/components/ble_nus/ble_nus.cpp b/esphome/components/ble_nus/ble_nus.cpp new file mode 100644 index 0000000000..9c4d0a3938 --- /dev/null +++ b/esphome/components/ble_nus/ble_nus.cpp @@ -0,0 +1,157 @@ +#ifdef USE_ZEPHYR +#include "ble_nus.h" +#include +#include +#include "esphome/core/log.h" +#ifdef USE_LOGGER +#include "esphome/components/logger/logger.h" +#include "esphome/core/application.h" +#endif +#include + +namespace esphome::ble_nus { + +constexpr size_t BLE_TX_BUF_SIZE = 2048; + +// NOLINTBEGIN(cppcoreguidelines-avoid-non-const-global-variables) +BLENUS *global_ble_nus; +RING_BUF_DECLARE(global_ble_tx_ring_buf, BLE_TX_BUF_SIZE); +// NOLINTEND(cppcoreguidelines-avoid-non-const-global-variables) + +static const char *const TAG = "ble_nus"; + +size_t BLENUS::write_array(const uint8_t *data, size_t len) { + if (atomic_get(&this->tx_status_) == TX_DISABLED) { + return 0; + } + return ring_buf_put(&global_ble_tx_ring_buf, data, len); +} + +void BLENUS::connected(bt_conn *conn, uint8_t err) { + if (err == 0) { + global_ble_nus->conn_.store(bt_conn_ref(conn)); + } +} + +void BLENUS::disconnected(bt_conn *conn, uint8_t reason) { + if (global_ble_nus->conn_) { + bt_conn_unref(global_ble_nus->conn_.load()); + // Connection array is global static. + // Reference can be kept even if disconnected. 
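+    // (Editor's note: conn_ is deliberately left set here; connected() overwrites it with a freshly
+    //  referenced pointer on the next connection.)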
+ } +} + +void BLENUS::tx_callback(bt_conn *conn) { + atomic_cas(&global_ble_nus->tx_status_, TX_BUSY, TX_ENABLED); + ESP_LOGVV(TAG, "Sent operation completed"); +} + +void BLENUS::send_enabled_callback(bt_nus_send_status status) { + switch (status) { + case BT_NUS_SEND_STATUS_ENABLED: + atomic_set(&global_ble_nus->tx_status_, TX_ENABLED); +#ifdef USE_LOGGER + if (global_ble_nus->expose_log_) { + App.schedule_dump_config(); + } +#endif + ESP_LOGD(TAG, "NUS notification has been enabled"); + break; + case BT_NUS_SEND_STATUS_DISABLED: + atomic_set(&global_ble_nus->tx_status_, TX_DISABLED); + ESP_LOGD(TAG, "NUS notification has been disabled"); + break; + } +} + +void BLENUS::rx_callback(bt_conn *conn, const uint8_t *const data, uint16_t len) { + ESP_LOGD(TAG, "Received %d bytes.", len); +} + +void BLENUS::setup() { + bt_nus_cb callbacks = { + .received = rx_callback, + .sent = tx_callback, + .send_enabled = send_enabled_callback, + }; + + bt_nus_init(&callbacks); + + static bt_conn_cb conn_callbacks = { + .connected = BLENUS::connected, + .disconnected = BLENUS::disconnected, + }; + + bt_conn_cb_register(&conn_callbacks); + + global_ble_nus = this; +#ifdef USE_LOGGER + if (logger::global_logger != nullptr && this->expose_log_) { + logger::global_logger->add_on_log_callback( + [this](int level, const char *tag, const char *message, size_t message_len) { + this->write_array(reinterpret_cast(message), message_len); + const char c = '\n'; + this->write_array(reinterpret_cast(&c), 1); + }); + } + +#endif +} + +void BLENUS::dump_config() { + ESP_LOGCONFIG(TAG, "ble nus:"); + ESP_LOGCONFIG(TAG, " log: %s", YESNO(this->expose_log_)); + uint32_t mtu = 0; + bt_conn *conn = this->conn_.load(); + if (conn) { + mtu = bt_nus_get_mtu(conn); + } + ESP_LOGCONFIG(TAG, " MTU: %u", mtu); +} + +void BLENUS::loop() { + if (ring_buf_is_empty(&global_ble_tx_ring_buf)) { + return; + } + + if (!atomic_cas(&this->tx_status_, TX_ENABLED, TX_BUSY)) { + if (atomic_get(&this->tx_status_) == TX_DISABLED) { + ring_buf_reset(&global_ble_tx_ring_buf); + } + return; + } + + bt_conn *conn = this->conn_.load(); + if (conn) { + conn = bt_conn_ref(conn); + } + + if (nullptr == conn) { + atomic_cas(&this->tx_status_, TX_BUSY, TX_ENABLED); + return; + } + + uint32_t req_len = bt_nus_get_mtu(conn); + + uint8_t *buf; + uint32_t size = ring_buf_get_claim(&global_ble_tx_ring_buf, &buf, req_len); + + int err, err2; + + err = bt_nus_send(conn, buf, size); + err2 = ring_buf_get_finish(&global_ble_tx_ring_buf, size); + if (err2) { + // It should no happen. 
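+    // (Editor's note: per the Zephyr ring_buf API this should only fail if `size` exceeded what
+    //  ring_buf_get_claim() returned above, so this branch is purely defensive.)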
+ ESP_LOGE(TAG, "Size %u exceeds valid bytes in the ring buffer (%d error)", size, err2); + } + if (err == 0) { + ESP_LOGVV(TAG, "Sent %d bytes", size); + } else { + ESP_LOGE(TAG, "Failed to send %d bytes (%d error)", size, err); + atomic_cas(&this->tx_status_, TX_BUSY, TX_ENABLED); + } + bt_conn_unref(conn); +} + +} // namespace esphome::ble_nus +#endif diff --git a/esphome/components/ble_nus/ble_nus.h b/esphome/components/ble_nus/ble_nus.h new file mode 100644 index 0000000000..e8cba32b4c --- /dev/null +++ b/esphome/components/ble_nus/ble_nus.h @@ -0,0 +1,37 @@ +#pragma once +#ifdef USE_ZEPHYR +#include "esphome/core/defines.h" +#include "esphome/core/component.h" +#include +#include + +namespace esphome::ble_nus { + +class BLENUS : public Component { + enum TxStatus { + TX_DISABLED, + TX_ENABLED, + TX_BUSY, + }; + + public: + void setup() override; + void dump_config() override; + void loop() override; + size_t write_array(const uint8_t *data, size_t len); + void set_expose_log(bool expose_log) { this->expose_log_ = expose_log; } + + protected: + static void send_enabled_callback(bt_nus_send_status status); + static void tx_callback(bt_conn *conn); + static void rx_callback(bt_conn *conn, const uint8_t *data, uint16_t len); + static void connected(bt_conn *conn, uint8_t err); + static void disconnected(bt_conn *conn, uint8_t reason); + + std::atomic conn_ = nullptr; + bool expose_log_ = false; + atomic_t tx_status_ = ATOMIC_INIT(TX_DISABLED); +}; + +} // namespace esphome::ble_nus +#endif diff --git a/esphome/components/zephyr_ble_server/__init__.py b/esphome/components/zephyr_ble_server/__init__.py new file mode 100644 index 0000000000..211941e984 --- /dev/null +++ b/esphome/components/zephyr_ble_server/__init__.py @@ -0,0 +1,34 @@ +import esphome.codegen as cg +from esphome.components.zephyr import zephyr_add_prj_conf +import esphome.config_validation as cv +from esphome.const import CONF_ESPHOME, CONF_ID, CONF_NAME, Framework +import esphome.final_validate as fv + +zephyr_ble_server_ns = cg.esphome_ns.namespace("zephyr_ble_server") +BLEServer = zephyr_ble_server_ns.class_("BLEServer", cg.Component) + +CONFIG_SCHEMA = cv.All( + cv.Schema( + { + cv.GenerateID(): cv.declare_id(BLEServer), + } + ).extend(cv.COMPONENT_SCHEMA), + cv.only_with_framework(Framework.ZEPHYR), +) + + +def _final_validate(_): + full_config = fv.full_config.get() + zephyr_add_prj_conf("BT_DEVICE_NAME", full_config[CONF_ESPHOME][CONF_NAME]) + + +FINAL_VALIDATE_SCHEMA = _final_validate + + +async def to_code(config): + var = cg.new_Pvariable(config[CONF_ID]) + zephyr_add_prj_conf("BT", True) + zephyr_add_prj_conf("BT_PERIPHERAL", True) + zephyr_add_prj_conf("BT_RX_STACK_SIZE", 1536) + # zephyr_add_prj_conf("BT_LL_SW_SPLIT", True) + await cg.register_component(var, config) diff --git a/esphome/components/zephyr_ble_server/ble_server.cpp b/esphome/components/zephyr_ble_server/ble_server.cpp new file mode 100644 index 0000000000..9f7e606a90 --- /dev/null +++ b/esphome/components/zephyr_ble_server/ble_server.cpp @@ -0,0 +1,100 @@ +#ifdef USE_ZEPHYR +#include "ble_server.h" +#include "esphome/core/defines.h" +#include "esphome/core/log.h" +#include +#include + +namespace esphome::zephyr_ble_server { + +static const char *const TAG = "zephyr_ble_server"; + +static struct k_work advertise_work; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables) + +#define DEVICE_NAME CONFIG_BT_DEVICE_NAME +#define DEVICE_NAME_LEN (sizeof(DEVICE_NAME) - 1) + +static const struct bt_data AD[] = { + BT_DATA_BYTES(BT_DATA_FLAGS, 
(BT_LE_AD_GENERAL | BT_LE_AD_NO_BREDR)), + BT_DATA(BT_DATA_NAME_COMPLETE, DEVICE_NAME, DEVICE_NAME_LEN), +}; + +static const struct bt_data SD[] = { +#ifdef USE_OTA + BT_DATA_BYTES(BT_DATA_UUID128_ALL, 0x84, 0xaa, 0x60, 0x74, 0x52, 0x8a, 0x8b, 0x86, 0xd3, 0x4c, 0xb7, 0x1d, 0x1d, + 0xdc, 0x53, 0x8d), +#endif +}; + +const struct bt_le_adv_param *const ADV_PARAM = BT_LE_ADV_CONN; + +static void advertise(struct k_work *work) { + int rc = bt_le_adv_stop(); + if (rc) { + ESP_LOGE(TAG, "Advertising failed to stop (rc %d)", rc); + } + + rc = bt_le_adv_start(ADV_PARAM, AD, ARRAY_SIZE(AD), SD, ARRAY_SIZE(SD)); + if (rc) { + ESP_LOGE(TAG, "Advertising failed to start (rc %d)", rc); + return; + } + ESP_LOGI(TAG, "Advertising successfully started"); +} + +static void connected(struct bt_conn *conn, uint8_t err) { + if (err) { + ESP_LOGE(TAG, "Connection failed (err 0x%02x)", err); + } else { + ESP_LOGI(TAG, "Connected"); + } +} + +static void disconnected(struct bt_conn *conn, uint8_t reason) { + ESP_LOGI(TAG, "Disconnected (reason 0x%02x)", reason); + k_work_submit(&advertise_work); +} + +static void bt_ready(int err) { + if (err != 0) { + ESP_LOGE(TAG, "Bluetooth failed to initialise: %d", err); + } else { + k_work_submit(&advertise_work); + } +} + +BT_CONN_CB_DEFINE(conn_callbacks) = { + .connected = connected, + .disconnected = disconnected, +}; + +void BLEServer::setup() { + k_work_init(&advertise_work, advertise); + resume_(); +} + +void BLEServer::loop() { + if (this->suspended_) { + resume_(); + this->suspended_ = false; + } +} + +void BLEServer::resume_() { + int rc = bt_enable(bt_ready); + if (rc != 0) { + ESP_LOGE(TAG, "Bluetooth enable failed: %d", rc); + return; + } +} + +void BLEServer::on_shutdown() { + struct k_work_sync sync; + k_work_cancel_sync(&advertise_work, &sync); + bt_disable(); + this->suspended_ = true; +} + +} // namespace esphome::zephyr_ble_server + +#endif diff --git a/esphome/components/zephyr_ble_server/ble_server.h b/esphome/components/zephyr_ble_server/ble_server.h new file mode 100644 index 0000000000..1b32e9b58c --- /dev/null +++ b/esphome/components/zephyr_ble_server/ble_server.h @@ -0,0 +1,19 @@ +#pragma once +#ifdef USE_ZEPHYR +#include "esphome/core/component.h" + +namespace esphome::zephyr_ble_server { + +class BLEServer : public Component { + public: + void setup() override; + void loop() override; + void on_shutdown() override; + + protected: + void resume_(); + bool suspended_ = false; +}; + +} // namespace esphome::zephyr_ble_server +#endif diff --git a/script/helpers_zephyr.py b/script/helpers_zephyr.py index 922f1171b4..f72b335e64 100644 --- a/script/helpers_zephyr.py +++ b/script/helpers_zephyr.py @@ -25,6 +25,7 @@ int main() { return 0;} Path(zephyr_dir / "prj.conf").write_text( """ CONFIG_NEWLIB_LIBC=y +CONFIG_BT=y CONFIG_ADC=y """, encoding="utf-8", diff --git a/tests/components/ble_nus/test.nrf52-adafruit.yaml b/tests/components/ble_nus/test.nrf52-adafruit.yaml new file mode 100644 index 0000000000..20eec16956 --- /dev/null +++ b/tests/components/ble_nus/test.nrf52-adafruit.yaml @@ -0,0 +1,2 @@ +ble_nus: + type: logs diff --git a/tests/components/ble_nus/test.nrf52-mcumgr.yaml b/tests/components/ble_nus/test.nrf52-mcumgr.yaml new file mode 100644 index 0000000000..20eec16956 --- /dev/null +++ b/tests/components/ble_nus/test.nrf52-mcumgr.yaml @@ -0,0 +1,2 @@ +ble_nus: + type: logs From 40823df7bc5ac6818c6b2513151a34a3eee2ac92 Mon Sep 17 00:00:00 2001 From: Juan Antonio Aldea Date: Sun, 19 Oct 2025 19:47:31 +0200 Subject: [PATCH 136/336] make types 
sensors_t and sensor_type_t internal to StatsdComponent. (#11345)

---
 esphome/components/statsd/statsd.h | 29 ++++++++++++++---------------
 1 file changed, 14 insertions(+), 15 deletions(-)

diff --git a/esphome/components/statsd/statsd.h b/esphome/components/statsd/statsd.h
index 34f84cbe00..eab77a7a6e 100644
--- a/esphome/components/statsd/statsd.h
+++ b/esphome/components/statsd/statsd.h
@@ -28,21 +28,6 @@
 namespace esphome {
 namespace statsd {
 
-using sensor_type_t = enum { TYPE_SENSOR, TYPE_BINARY_SENSOR };
-
-using sensors_t = struct {
-  const char *name;
-  sensor_type_t type;
-  union {
-#ifdef USE_SENSOR
-    esphome::sensor::Sensor *sensor;
-#endif
-#ifdef USE_BINARY_SENSOR
-    esphome::binary_sensor::BinarySensor *binary_sensor;
-#endif
-  };
-};
-
 class StatsdComponent : public PollingComponent {
  public:
   ~StatsdComponent();
@@ -71,6 +56,20 @@ class StatsdComponent : public PollingComponent {
   const char *prefix_;
   uint16_t port_;
 
+  using sensor_type_t = enum { TYPE_SENSOR, TYPE_BINARY_SENSOR };
+  using sensors_t = struct {
+    const char *name;
+    sensor_type_t type;
+    union {
+#ifdef USE_SENSOR
+      esphome::sensor::Sensor *sensor;
+#endif
+#ifdef USE_BINARY_SENSOR
+      esphome::binary_sensor::BinarySensor *binary_sensor;
+#endif
+    };
+  };
+
   std::vector<sensors_t> sensors_;
 
 #ifdef USE_ESP8266

From 5db07c2d708c2eb15a1fa7f032735ac1fc907d66 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Sun, 19 Oct 2025 08:31:40 -1000
Subject: [PATCH 137/336] [api][time] Refactor timezone update logic for cleaner code (#11327)

---
 esphome/components/api/api_connection.cpp | 9 ++-------
 esphome/components/time/real_time_clock.h | 8 ++++++++
 2 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp
index ea1e130092..7dfefedd54 100644
--- a/esphome/components/api/api_connection.cpp
+++ b/esphome/components/api/api_connection.cpp
@@ -1082,13 +1082,8 @@ void APIConnection::on_get_time_response(const GetTimeResponse &value) {
   homeassistant::global_homeassistant_time->set_epoch_time(value.epoch_seconds);
 #ifdef USE_TIME_TIMEZONE
   if (value.timezone_len > 0) {
-    const std::string &current_tz = homeassistant::global_homeassistant_time->get_timezone();
-    // Compare without allocating a string
-    if (current_tz.length() != value.timezone_len ||
-        memcmp(current_tz.c_str(), value.timezone, value.timezone_len) != 0) {
-      homeassistant::global_homeassistant_time->set_timezone(
-          std::string(reinterpret_cast<const char *>(value.timezone), value.timezone_len));
-    }
+    homeassistant::global_homeassistant_time->set_timezone(reinterpret_cast<const char *>(value.timezone),
+                                                           value.timezone_len);
   }
 #endif
 }
diff --git a/esphome/components/time/real_time_clock.h b/esphome/components/time/real_time_clock.h
index 4b98a88975..7e60bbd234 100644
--- a/esphome/components/time/real_time_clock.h
+++ b/esphome/components/time/real_time_clock.h
@@ -27,6 +27,14 @@ class RealTimeClock : public PollingComponent {
     this->apply_timezone_();
   }
 
+  /// Set the time zone from a raw buffer, only if it differs from the current one.
+  void set_timezone(const char *tz, size_t len) {
+    if (this->timezone_.length() != len || memcmp(this->timezone_.c_str(), tz, len) != 0) {
+      this->timezone_.assign(tz, len);
+      this->apply_timezone_();
+    }
+  }
+
   /// Get the time zone currently in use.
   std::string get_timezone() { return this->timezone_; }
 #endif

From f25af18655d626824fab1951041ac57b15accbde Mon Sep 17 00:00:00 2001
From: "J.
Nick Koston" Date: Sun, 19 Oct 2025 08:34:34 -1000 Subject: [PATCH 138/336] [scheduler] Replace defer queue deque with vector to avoid 512-byte upfront allocation (#11305) --- esphome/core/scheduler.cpp | 36 +++++++++++++++++++------ esphome/core/scheduler.h | 54 ++++++++++++++++++++++++++++++++++---- 2 files changed, 77 insertions(+), 13 deletions(-) diff --git a/esphome/core/scheduler.cpp b/esphome/core/scheduler.cpp index 402084f306..0d4715f621 100644 --- a/esphome/core/scheduler.cpp +++ b/esphome/core/scheduler.cpp @@ -328,17 +328,30 @@ void HOT Scheduler::call(uint32_t now) { // Single-core platforms don't use this queue and fall back to the heap-based approach. // // Note: Items cancelled via cancel_item_locked_() are marked with remove=true but still - // processed here. They are removed from the queue normally via pop_front() but skipped - // during execution by should_skip_item_(). This is intentional - no memory leak occurs. - while (!this->defer_queue_.empty()) { - // The outer check is done without a lock for performance. If the queue - // appears non-empty, we lock and process an item. We don't need to check - // empty() again inside the lock because only this thread can remove items. + // processed here. They are skipped during execution by should_skip_item_(). + // This is intentional - no memory leak occurs. + // + // We use an index (defer_queue_front_) to track the read position instead of calling + // erase() on every pop, which would be O(n). The queue is processed once per loop - + // any items added during processing are left for the next loop iteration. + + // Snapshot the queue end point - only process items that existed at loop start + // Items added during processing (by callbacks or other threads) run next loop + // No lock needed: single consumer (main loop), stale read just means we process less this iteration + size_t defer_queue_end = this->defer_queue_.size(); + + while (this->defer_queue_front_ < defer_queue_end) { std::unique_ptr item; { LockGuard lock(this->lock_); - item = std::move(this->defer_queue_.front()); - this->defer_queue_.pop_front(); + // SAFETY: Moving out the unique_ptr leaves a nullptr in the vector at defer_queue_front_. + // This is intentional and safe because: + // 1. The vector is only cleaned up by cleanup_defer_queue_locked_() at the end of this function + // 2. Any code iterating defer_queue_ MUST check for nullptr items (see mark_matching_items_removed_ + // and has_cancelled_timeout_in_container_ in scheduler.h) + // 3. 
The lock protects concurrent access, but the nullptr remains until cleanup + item = std::move(this->defer_queue_[this->defer_queue_front_]); + this->defer_queue_front_++; } // Execute callback without holding lock to prevent deadlocks @@ -349,6 +362,13 @@ void HOT Scheduler::call(uint32_t now) { // Recycle the defer item after execution this->recycle_item_(std::move(item)); } + + // If we've consumed all items up to the snapshot point, clean up the dead space + // Single consumer (main loop), so no lock needed for this check + if (this->defer_queue_front_ >= defer_queue_end) { + LockGuard lock(this->lock_); + this->cleanup_defer_queue_locked_(); + } #endif /* not ESPHOME_THREAD_SINGLE */ // Convert the fresh timestamp from main loop to 64-bit for scheduler operations diff --git a/esphome/core/scheduler.h b/esphome/core/scheduler.h index 2237915e07..ad0ec0284e 100644 --- a/esphome/core/scheduler.h +++ b/esphome/core/scheduler.h @@ -264,6 +264,36 @@ class Scheduler { // Helper to recycle a SchedulerItem void recycle_item_(std::unique_ptr item); +#ifndef ESPHOME_THREAD_SINGLE + // Helper to cleanup defer_queue_ after processing + // IMPORTANT: Caller must hold the scheduler lock before calling this function. + inline void cleanup_defer_queue_locked_() { + // Check if new items were added by producers during processing + if (this->defer_queue_front_ >= this->defer_queue_.size()) { + // Common case: no new items - clear everything + this->defer_queue_.clear(); + } else { + // Rare case: new items were added during processing - compact the vector + // This only happens when: + // 1. A deferred callback calls defer() again, or + // 2. Another thread calls defer() while we're processing + // + // Move unprocessed items (added during this loop) to the front for next iteration + // + // SAFETY: Compacted items may include cancelled items (marked for removal via + // cancel_item_locked_() during execution). This is safe because should_skip_item_() + // checks is_item_removed_() before executing, so cancelled items will be skipped + // and recycled on the next loop iteration. + size_t remaining = this->defer_queue_.size() - this->defer_queue_front_; + for (size_t i = 0; i < remaining; i++) { + this->defer_queue_[i] = std::move(this->defer_queue_[this->defer_queue_front_ + i]); + } + this->defer_queue_.resize(remaining); + } + this->defer_queue_front_ = 0; + } +#endif /* not ESPHOME_THREAD_SINGLE */ + // Helper to check if item is marked for removal (platform-specific) // Returns true if item should be skipped, handles platform-specific synchronization // For ESPHOME_THREAD_MULTI_NO_ATOMICS platforms, the caller must hold the scheduler lock before calling this @@ -282,13 +312,18 @@ class Scheduler { // Helper to mark matching items in a container as removed // Returns the number of items marked for removal - // For ESPHOME_THREAD_MULTI_NO_ATOMICS platforms, the caller must hold the scheduler lock before calling this - // function. + // IMPORTANT: Caller must hold the scheduler lock before calling this function. template size_t mark_matching_items_removed_(Container &container, Component *component, const char *name_cstr, SchedulerItem::Type type, bool match_retry) { size_t count = 0; for (auto &item : container) { + // Skip nullptr items (can happen in defer_queue_ when items are being processed) + // The defer_queue_ uses index-based processing: items are std::moved out but left in the + // vector as nullptr until cleanup. 
Even though this function is called with lock held,
+      // the vector can still contain nullptr items from the processing loop. This check prevents crashes.
+      if (!item)
+        continue;
       if (this->matches_item_(item, component, name_cstr, type, match_retry)) {
         // Mark item for removal (platform-specific)
 #ifdef ESPHOME_THREAD_MULTI_ATOMICS
@@ -311,6 +346,12 @@
   bool has_cancelled_timeout_in_container_(const Container &container, Component *component, const char *name_cstr,
                                            bool match_retry) const {
     for (const auto &item : container) {
+      // Skip nullptr items (can happen in defer_queue_ when items are being processed)
+      // The defer_queue_ uses index-based processing: items are std::moved out but left in the
+      // vector as nullptr until cleanup. If this function is called during defer queue processing,
+      // it will iterate over these nullptr items. This check prevents crashes.
+      if (!item)
+        continue;
       if (is_item_removed_(item.get()) &&
           this->matches_item_(item, component, name_cstr, SchedulerItem::TIMEOUT, match_retry,
                               /* skip_removed= */ false)) {
@@ -324,9 +365,12 @@
   std::vector<std::unique_ptr<SchedulerItem>> items_;
   std::vector<std::unique_ptr<SchedulerItem>> to_add_;
 #ifndef ESPHOME_THREAD_SINGLE
-  // Single-core platforms don't need the defer queue and save 40 bytes of RAM
-  std::deque<std::unique_ptr<SchedulerItem>> defer_queue_;  // FIFO queue for defer() calls
-#endif /* ESPHOME_THREAD_SINGLE */
+  // Single-core platforms don't need the defer queue and save ~32 bytes of RAM
+  // Using std::vector instead of std::deque avoids 512-byte chunked allocations
+  // Index tracking avoids O(n) erase() calls when draining the queue each loop
+  std::vector<std::unique_ptr<SchedulerItem>> defer_queue_;  // FIFO queue for defer() calls
+  size_t defer_queue_front_{0};  // Index of first valid item in defer_queue_ (tracks consumed items)
+#endif /* ESPHOME_THREAD_SINGLE */
   uint32_t to_remove_{0};
 
   // Memory pool for recycling SchedulerItem objects to reduce heap churn.

From 1586a185a0b7692aa72b61feab332b6e347ee684 Mon Sep 17 00:00:00 2001
From: "J.
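A minimal sketch of the drain pattern this scheduler patch introduces, written in Python purely as an illustration (DeferQueueSketch and drain_once are invented names, not ESPHome code): snapshot the queue end, leave a hole where each entry is moved out, then compact whatever producers appended during the pass.

# Illustration only, assuming single consumer and append-only producers.
class DeferQueueSketch:
    def __init__(self):
        self.items = []  # plays the role of defer_queue_
        self.front = 0   # plays the role of defer_queue_front_

    def defer(self, callback):
        # Producers only ever append; they never touch self.front.
        self.items.append(callback)

    def drain_once(self):
        # Snapshot the end so entries added during this pass wait for the next one.
        end = len(self.items)
        while self.front < end:
            item = self.items[self.front]
            self.items[self.front] = None  # leave a hole, like the moved-from unique_ptr
            self.front += 1
            if item is not None:
                item()  # in the real code the callback runs without holding the lock
        # Cleanup: drop the consumed slots, keep entries appended during the pass.
        self.items = self.items[self.front:]
        self.front = 0


q = DeferQueueSketch()
q.defer(lambda: q.defer(lambda: print("runs on the next pass")))
q.drain_once()  # executes only the outer callback
q.drain_once()  # executes the nested one

Cancellation in the real scheduler works by marking entries and skipping them at execution time, which is why any code that scans the queue has to tolerate the nullptr holes until cleanup runs.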
Nick Koston" Date: Sun, 19 Oct 2025 08:34:38 -1000 Subject: [PATCH 139/336] [esp32] Automatic CONFIG_LWIP_MAX_SOCKETS configuration based on component needs --- esphome/components/api/__init__.py | 12 ++++ esphome/components/esp32/__init__.py | 66 +++++++++++++++++++ .../esp32_camera_web_server/__init__.py | 29 ++++++-- esphome/components/esphome/ota/__init__.py | 14 +++- esphome/components/mdns/__init__.py | 14 ++++ esphome/components/mqtt/__init__.py | 10 +++ esphome/components/socket/__init__.py | 28 ++++++++ esphome/components/web_server/__init__.py | 13 ++++ 8 files changed, 177 insertions(+), 9 deletions(-) diff --git a/esphome/components/api/__init__.py b/esphome/components/api/__init__.py index e8dacf51bc..e91e922204 100644 --- a/esphome/components/api/__init__.py +++ b/esphome/components/api/__init__.py @@ -155,6 +155,17 @@ def _validate_api_config(config: ConfigType) -> ConfigType: return config +def _consume_api_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for API component.""" + from esphome.components import socket + + # API needs 1 listening socket + typically 3 concurrent client connections + # (not max_connections, which is the upper limit rarely reached) + sockets_needed = 1 + 3 + socket.consume_sockets(sockets_needed, "api")(config) + return config + + CONFIG_SCHEMA = cv.All( cv.Schema( { @@ -222,6 +233,7 @@ CONFIG_SCHEMA = cv.All( ).extend(cv.COMPONENT_SCHEMA), cv.rename_key(CONF_SERVICES, CONF_ACTIONS), _validate_api_config, + _consume_api_sockets, ) diff --git a/esphome/components/esp32/__init__.py b/esphome/components/esp32/__init__.py index b7dd25e0d8..383bbf19ee 100644 --- a/esphome/components/esp32/__init__.py +++ b/esphome/components/esp32/__init__.py @@ -1,3 +1,4 @@ +import contextlib from dataclasses import dataclass import itertools import logging @@ -102,6 +103,10 @@ COMPILER_OPTIMIZATIONS = { "SIZE": "CONFIG_COMPILER_OPTIMIZATION_SIZE", } +# Socket limit configuration for ESP-IDF +# ESP-IDF CONFIG_LWIP_MAX_SOCKETS has range 1-253, default 10 +DEFAULT_MAX_SOCKETS = 10 # ESP-IDF default + ARDUINO_ALLOWED_VARIANTS = [ VARIANT_ESP32, VARIANT_ESP32C3, @@ -855,6 +860,67 @@ async def to_code(config): add_idf_sdkconfig_option("CONFIG_LWIP_DNS_SUPPORT_MDNS_QUERIES", False) if not advanced.get(CONF_ENABLE_LWIP_BRIDGE_INTERFACE, False): add_idf_sdkconfig_option("CONFIG_LWIP_BRIDGEIF_MAX_PORTS", 0) + + # Calculate and set CONFIG_LWIP_MAX_SOCKETS based on component needs + # Socket component tracks consumer needs via consume_sockets() called during config validation + # This code runs in to_code() after all components have registered their socket needs + # User-provided sdkconfig_options take precedence + from esphome.components.socket import KEY_SOCKET_CONSUMERS + + # Check if user manually specified CONFIG_LWIP_MAX_SOCKETS + user_max_sockets = conf.get(CONF_SDKCONFIG_OPTIONS, {}).get( + "CONFIG_LWIP_MAX_SOCKETS" + ) + + socket_consumers: dict[str, int] = CORE.data.get(KEY_SOCKET_CONSUMERS, {}) + total_sockets = sum(socket_consumers.values()) + components_list = ( + ", ".join(f"{name}={count}" for name, count in sorted(socket_consumers.items())) + if total_sockets > 0 + else "" + ) + + if user_max_sockets is None: + # Auto-calculate based on component needs + # Use at least the ESP-IDF default (10), or the total needed by components + max_sockets = max(DEFAULT_MAX_SOCKETS, total_sockets) + + if total_sockets > 0: + log_level = ( + logging.INFO if max_sockets > DEFAULT_MAX_SOCKETS else logging.DEBUG + ) + _LOGGER.log( + log_level, + "Setting 
CONFIG_LWIP_MAX_SOCKETS to %d (registered: %s)", + max_sockets, + components_list, + ) + + add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", max_sockets) + else: + # User specified their own value - respect it + _LOGGER.info( + "Using user-provided CONFIG_LWIP_MAX_SOCKETS: %s", + user_max_sockets, + ) + + # Warn if user's value is less than what components need + if total_sockets > 0: + user_sockets_int = 0 + with contextlib.suppress(ValueError, TypeError): + user_sockets_int = int(user_max_sockets) + + if user_sockets_int < total_sockets: + _LOGGER.warning( + "CONFIG_LWIP_MAX_SOCKETS is set to %d but your configuration needs %d sockets (registered: %s). " + "You may experience socket exhaustion errors. Consider increasing to at least %d.", + user_sockets_int, + total_sockets, + components_list, + total_sockets, + ) + # User's value already added via sdkconfig_options processing + if advanced.get(CONF_EXECUTE_FROM_PSRAM, False): add_idf_sdkconfig_option("CONFIG_SPIRAM_FETCH_INSTRUCTIONS", True) add_idf_sdkconfig_option("CONFIG_SPIRAM_RODATA", True) diff --git a/esphome/components/esp32_camera_web_server/__init__.py b/esphome/components/esp32_camera_web_server/__init__.py index a6a7ac3630..315cd649d1 100644 --- a/esphome/components/esp32_camera_web_server/__init__.py +++ b/esphome/components/esp32_camera_web_server/__init__.py @@ -1,6 +1,7 @@ import esphome.codegen as cg import esphome.config_validation as cv from esphome.const import CONF_ID, CONF_MODE, CONF_PORT +from esphome.types import ConfigType CODEOWNERS = ["@ayufan"] AUTO_LOAD = ["camera"] @@ -13,13 +14,27 @@ Mode = esp32_camera_web_server_ns.enum("Mode") MODES = {"STREAM": Mode.STREAM, "SNAPSHOT": Mode.SNAPSHOT} -CONFIG_SCHEMA = cv.Schema( - { - cv.GenerateID(): cv.declare_id(CameraWebServer), - cv.Required(CONF_PORT): cv.port, - cv.Required(CONF_MODE): cv.enum(MODES, upper=True), - }, -).extend(cv.COMPONENT_SCHEMA) + +def _consume_camera_web_server_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for camera web server.""" + from esphome.components import socket + + # Each camera web server instance needs 1 listening socket + 1-2 client connections + sockets_needed = 2 + socket.consume_sockets(sockets_needed, "esp32_camera_web_server")(config) + return config + + +CONFIG_SCHEMA = cv.All( + cv.Schema( + { + cv.GenerateID(): cv.declare_id(CameraWebServer), + cv.Required(CONF_PORT): cv.port, + cv.Required(CONF_MODE): cv.enum(MODES, upper=True), + }, + ).extend(cv.COMPONENT_SCHEMA), + _consume_camera_web_server_sockets, +) async def to_code(config): diff --git a/esphome/components/esphome/ota/__init__.py b/esphome/components/esphome/ota/__init__.py index 69a50a2de9..e56e85b231 100644 --- a/esphome/components/esphome/ota/__init__.py +++ b/esphome/components/esphome/ota/__init__.py @@ -103,7 +103,16 @@ def ota_esphome_final_validate(config): ) -CONFIG_SCHEMA = ( +def _consume_ota_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for OTA component.""" + from esphome.components import socket + + # OTA needs 1 listening socket (client connections are temporary during updates) + socket.consume_sockets(1, "ota")(config) + return config + + +CONFIG_SCHEMA = cv.All( cv.Schema( { cv.GenerateID(): cv.declare_id(ESPHomeOTAComponent), @@ -130,7 +139,8 @@ CONFIG_SCHEMA = ( } ) .extend(BASE_OTA_SCHEMA) - .extend(cv.COMPONENT_SCHEMA) + .extend(cv.COMPONENT_SCHEMA), + _consume_ota_sockets, ) FINAL_VALIDATE_SCHEMA = ota_esphome_final_validate diff --git a/esphome/components/mdns/__init__.py 
b/esphome/components/mdns/__init__.py index c6a9ee1a0c..6b4578ac23 100644 --- a/esphome/components/mdns/__init__.py +++ b/esphome/components/mdns/__init__.py @@ -46,6 +46,19 @@ SERVICE_SCHEMA = cv.Schema( } ) + +def _consume_mdns_sockets(config): + """Register socket needs for mDNS component.""" + if config.get(CONF_DISABLED): + return config + + from esphome.components import socket + + # mDNS needs 2 sockets (IPv4 + IPv6 multicast) + socket.consume_sockets(2, "mdns")(config) + return config + + CONFIG_SCHEMA = cv.All( cv.Schema( { @@ -55,6 +68,7 @@ CONFIG_SCHEMA = cv.All( } ), _remove_id_if_disabled, + _consume_mdns_sockets, ) diff --git a/esphome/components/mqtt/__init__.py b/esphome/components/mqtt/__init__.py index 814fb566d4..3866e09a24 100644 --- a/esphome/components/mqtt/__init__.py +++ b/esphome/components/mqtt/__init__.py @@ -210,6 +210,15 @@ def validate_fingerprint(value): return value +def _consume_mqtt_sockets(config): + """Register socket needs for MQTT component.""" + from esphome.components import socket + + # MQTT needs 1 socket for the broker connection + socket.consume_sockets(1, "mqtt")(config) + return config + + CONFIG_SCHEMA = cv.All( cv.Schema( { @@ -306,6 +315,7 @@ CONFIG_SCHEMA = cv.All( ), validate_config, cv.only_on([PLATFORM_ESP32, PLATFORM_ESP8266, PLATFORM_BK72XX]), + _consume_mqtt_sockets, ) diff --git a/esphome/components/socket/__init__.py b/esphome/components/socket/__init__.py index e085a09eac..e6a4cfc07f 100644 --- a/esphome/components/socket/__init__.py +++ b/esphome/components/socket/__init__.py @@ -1,3 +1,5 @@ +from collections.abc import Callable, MutableMapping + import esphome.codegen as cg import esphome.config_validation as cv from esphome.core import CORE @@ -9,6 +11,32 @@ IMPLEMENTATION_LWIP_TCP = "lwip_tcp" IMPLEMENTATION_LWIP_SOCKETS = "lwip_sockets" IMPLEMENTATION_BSD_SOCKETS = "bsd_sockets" +# Socket tracking infrastructure +# Components register their socket needs and platforms read this to configure appropriately +KEY_SOCKET_CONSUMERS = "socket_consumers" + + +def consume_sockets( + value: int, consumer: str +) -> Callable[[MutableMapping], MutableMapping]: + """Register socket usage for a component. 
+ + Args: + value: Number of sockets needed by the component + consumer: Name of the component consuming the sockets + + Returns: + A validator function that records the socket usage + """ + + def _consume_sockets(config: MutableMapping) -> MutableMapping: + consumers: dict[str, int] = CORE.data.setdefault(KEY_SOCKET_CONSUMERS, {}) + consumers[consumer] = consumers.get(consumer, 0) + value + return config + + return _consume_sockets + + CONFIG_SCHEMA = cv.Schema( { cv.SplitDefault( diff --git a/esphome/components/web_server/__init__.py b/esphome/components/web_server/__init__.py index 288d928e80..a7fdf30eef 100644 --- a/esphome/components/web_server/__init__.py +++ b/esphome/components/web_server/__init__.py @@ -136,6 +136,18 @@ def _final_validate_sorting(config: ConfigType) -> ConfigType: FINAL_VALIDATE_SCHEMA = _final_validate_sorting + +def _consume_web_server_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for web_server component.""" + from esphome.components import socket + + # Web server needs 1 listening socket + typically 2 concurrent client connections + # (browser makes 2 connections for page + event stream) + sockets_needed = 3 + socket.consume_sockets(sockets_needed, "web_server")(config) + return config + + sorting_group = { cv.Required(CONF_ID): cv.declare_id(cg.int_), cv.Required(CONF_NAME): cv.string, @@ -205,6 +217,7 @@ CONFIG_SCHEMA = cv.All( validate_local, validate_sorting_groups, validate_ota, + _consume_web_server_sockets, ) From 55473991a903a9195e6fa9c96d3b125f4a4da7a0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 19 Oct 2025 08:37:43 -1000 Subject: [PATCH 140/336] preen --- esphome/components/mdns/__init__.py | 3 ++- esphome/components/mqtt/__init__.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/esphome/components/mdns/__init__.py b/esphome/components/mdns/__init__.py index 6b4578ac23..4776bef22f 100644 --- a/esphome/components/mdns/__init__.py +++ b/esphome/components/mdns/__init__.py @@ -13,6 +13,7 @@ from esphome.const import ( ) from esphome.core import CORE, Lambda, coroutine_with_priority from esphome.coroutine import CoroPriority +from esphome.types import ConfigType CODEOWNERS = ["@esphome/core"] DEPENDENCIES = ["network"] @@ -47,7 +48,7 @@ SERVICE_SCHEMA = cv.Schema( ) -def _consume_mdns_sockets(config): +def _consume_mdns_sockets(config: ConfigType) -> ConfigType: """Register socket needs for mDNS component.""" if config.get(CONF_DISABLED): return config diff --git a/esphome/components/mqtt/__init__.py b/esphome/components/mqtt/__init__.py index 3866e09a24..641c70a367 100644 --- a/esphome/components/mqtt/__init__.py +++ b/esphome/components/mqtt/__init__.py @@ -58,6 +58,7 @@ from esphome.const import ( PlatformFramework, ) from esphome.core import CORE, CoroPriority, coroutine_with_priority +from esphome.types import ConfigType DEPENDENCIES = ["network"] @@ -210,7 +211,7 @@ def validate_fingerprint(value): return value -def _consume_mqtt_sockets(config): +def _consume_mqtt_sockets(config: ConfigType) -> ConfigType: """Register socket needs for MQTT component.""" from esphome.components import socket From 7107f5d984a48ef1f91b6121974389202df51258 Mon Sep 17 00:00:00 2001 From: "J. 
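A short sketch of how the consume_sockets() registrations from the previous patches add up, in Python and for illustration only: the plain dict stands in for CORE.data[KEY_SOCKET_CONSUMERS], and the per-component counts are the ones registered above.

# Illustration of the socket bookkeeping; not the real esphome.components.socket module.
socket_consumers: dict[str, int] = {}

def consume_sockets(value: int, consumer: str):
    """Return a validator that records how many sockets a component needs."""
    def _consume(config):
        socket_consumers[consumer] = socket_consumers.get(consumer, 0) + value
        return config
    return _consume

# Roughly what a typical api + ota + mdns + web_server configuration registers:
consume_sockets(4, "api")({})         # 1 listener + ~3 concurrent clients
consume_sockets(1, "ota")({})         # listener only
consume_sockets(2, "mdns")({})        # IPv4 + IPv6 multicast
consume_sockets(3, "web_server")({})  # listener + 2 clients

total = sum(socket_consumers.values())  # 10
max_sockets = max(10, total)            # never below the ESP-IDF default of 10
print(socket_consumers, total, max_sockets)

Adding mqtt would raise the total to 11, and the esp32 platform code above would then set CONFIG_LWIP_MAX_SOCKETS to 11 instead of the default 10.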
Nick Koston" Date: Sun, 19 Oct 2025 08:40:01 -1000 Subject: [PATCH 141/336] preen --- esphome/components/esp32/__init__.py | 126 ++++++++++++++------------- 1 file changed, 67 insertions(+), 59 deletions(-) diff --git a/esphome/components/esp32/__init__.py b/esphome/components/esp32/__init__.py index 383bbf19ee..7fdf6d340a 100644 --- a/esphome/components/esp32/__init__.py +++ b/esphome/components/esp32/__init__.py @@ -751,6 +751,72 @@ CONFIG_SCHEMA = cv.All( FINAL_VALIDATE_SCHEMA = cv.Schema(final_validate) +def _configure_lwip_max_sockets(conf: dict) -> None: + """Calculate and set CONFIG_LWIP_MAX_SOCKETS based on component needs. + + Socket component tracks consumer needs via consume_sockets() called during config validation. + This function runs in to_code() after all components have registered their socket needs. + User-provided sdkconfig_options take precedence. + """ + from esphome.components.socket import KEY_SOCKET_CONSUMERS + + # Check if user manually specified CONFIG_LWIP_MAX_SOCKETS + user_max_sockets = conf.get(CONF_SDKCONFIG_OPTIONS, {}).get( + "CONFIG_LWIP_MAX_SOCKETS" + ) + + socket_consumers: dict[str, int] = CORE.data.get(KEY_SOCKET_CONSUMERS, {}) + total_sockets = sum(socket_consumers.values()) + + # Early return if no sockets registered and no user override + if total_sockets == 0 and user_max_sockets is None: + add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", DEFAULT_MAX_SOCKETS) + return + + components_list = ", ".join( + f"{name}={count}" for name, count in sorted(socket_consumers.items()) + ) + + # User specified their own value - respect it but warn if insufficient + if user_max_sockets is not None: + _LOGGER.info( + "Using user-provided CONFIG_LWIP_MAX_SOCKETS: %s", + user_max_sockets, + ) + + # Warn if user's value is less than what components need + if total_sockets > 0: + user_sockets_int = 0 + with contextlib.suppress(ValueError, TypeError): + user_sockets_int = int(user_max_sockets) + + if user_sockets_int < total_sockets: + _LOGGER.warning( + "CONFIG_LWIP_MAX_SOCKETS is set to %d but your configuration needs %d sockets (registered: %s). " + "You may experience socket exhaustion errors. 
Consider increasing to at least %d.", + user_sockets_int, + total_sockets, + components_list, + total_sockets, + ) + # User's value already added via sdkconfig_options processing + return + + # Auto-calculate based on component needs + # Use at least the ESP-IDF default (10), or the total needed by components + max_sockets = max(DEFAULT_MAX_SOCKETS, total_sockets) + + log_level = logging.INFO if max_sockets > DEFAULT_MAX_SOCKETS else logging.DEBUG + _LOGGER.log( + log_level, + "Setting CONFIG_LWIP_MAX_SOCKETS to %d (registered: %s)", + max_sockets, + components_list, + ) + + add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", max_sockets) + + async def to_code(config): cg.add_platformio_option("board", config[CONF_BOARD]) cg.add_platformio_option("board_upload.flash_size", config[CONF_FLASH_SIZE]) @@ -861,65 +927,7 @@ async def to_code(config): if not advanced.get(CONF_ENABLE_LWIP_BRIDGE_INTERFACE, False): add_idf_sdkconfig_option("CONFIG_LWIP_BRIDGEIF_MAX_PORTS", 0) - # Calculate and set CONFIG_LWIP_MAX_SOCKETS based on component needs - # Socket component tracks consumer needs via consume_sockets() called during config validation - # This code runs in to_code() after all components have registered their socket needs - # User-provided sdkconfig_options take precedence - from esphome.components.socket import KEY_SOCKET_CONSUMERS - - # Check if user manually specified CONFIG_LWIP_MAX_SOCKETS - user_max_sockets = conf.get(CONF_SDKCONFIG_OPTIONS, {}).get( - "CONFIG_LWIP_MAX_SOCKETS" - ) - - socket_consumers: dict[str, int] = CORE.data.get(KEY_SOCKET_CONSUMERS, {}) - total_sockets = sum(socket_consumers.values()) - components_list = ( - ", ".join(f"{name}={count}" for name, count in sorted(socket_consumers.items())) - if total_sockets > 0 - else "" - ) - - if user_max_sockets is None: - # Auto-calculate based on component needs - # Use at least the ESP-IDF default (10), or the total needed by components - max_sockets = max(DEFAULT_MAX_SOCKETS, total_sockets) - - if total_sockets > 0: - log_level = ( - logging.INFO if max_sockets > DEFAULT_MAX_SOCKETS else logging.DEBUG - ) - _LOGGER.log( - log_level, - "Setting CONFIG_LWIP_MAX_SOCKETS to %d (registered: %s)", - max_sockets, - components_list, - ) - - add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", max_sockets) - else: - # User specified their own value - respect it - _LOGGER.info( - "Using user-provided CONFIG_LWIP_MAX_SOCKETS: %s", - user_max_sockets, - ) - - # Warn if user's value is less than what components need - if total_sockets > 0: - user_sockets_int = 0 - with contextlib.suppress(ValueError, TypeError): - user_sockets_int = int(user_max_sockets) - - if user_sockets_int < total_sockets: - _LOGGER.warning( - "CONFIG_LWIP_MAX_SOCKETS is set to %d but your configuration needs %d sockets (registered: %s). " - "You may experience socket exhaustion errors. Consider increasing to at least %d.", - user_sockets_int, - total_sockets, - components_list, - total_sockets, - ) - # User's value already added via sdkconfig_options processing + _configure_lwip_max_sockets(conf) if advanced.get(CONF_EXECUTE_FROM_PSRAM, False): add_idf_sdkconfig_option("CONFIG_SPIRAM_FETCH_INSTRUCTIONS", True) From 148a78aa015a7723230b6521f865deceea89444a Mon Sep 17 00:00:00 2001 From: "J. 
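One easy-to-miss detail in the override path above: values taken from sdkconfig_options arrive as strings, so the comparison only works after a guarded int() conversion, and a value that fails to parse is left at 0 and therefore still triggers the warning. A small illustration in Python, not part of the patch; check_user_override is an invented name.

import contextlib

def check_user_override(user_max_sockets, total_sockets: int) -> None:
    # sdkconfig_options values are strings such as "8"; unparsable values stay at 0.
    user_sockets_int = 0
    with contextlib.suppress(ValueError, TypeError):
        user_sockets_int = int(user_max_sockets)
    if user_sockets_int < total_sockets:
        print(f"warning: CONFIG_LWIP_MAX_SOCKETS={user_sockets_int} "
              f"but components need {total_sockets}")

check_user_override("8", 12)   # warns
check_user_override("16", 12)  # silent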
Nick Koston" Date: Sun, 19 Oct 2025 08:41:21 -1000 Subject: [PATCH 142/336] preen --- esphome/components/esp32/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/esphome/components/esp32/__init__.py b/esphome/components/esp32/__init__.py index 7fdf6d340a..6764764644 100644 --- a/esphome/components/esp32/__init__.py +++ b/esphome/components/esp32/__init__.py @@ -770,7 +770,6 @@ def _configure_lwip_max_sockets(conf: dict) -> None: # Early return if no sockets registered and no user override if total_sockets == 0 and user_max_sockets is None: - add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", DEFAULT_MAX_SOCKETS) return components_list = ", ".join( @@ -792,8 +791,9 @@ def _configure_lwip_max_sockets(conf: dict) -> None: if user_sockets_int < total_sockets: _LOGGER.warning( - "CONFIG_LWIP_MAX_SOCKETS is set to %d but your configuration needs %d sockets (registered: %s). " - "You may experience socket exhaustion errors. Consider increasing to at least %d.", + "CONFIG_LWIP_MAX_SOCKETS is set to %d but your configuration " + "needs %d sockets (registered: %s). You may experience socket " + "exhaustion errors. Consider increasing to at least %d.", user_sockets_int, total_sockets, components_list, From 4fa908d0b8ed199cdd27020e54751725ddf476cf Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 19 Oct 2025 08:43:30 -1000 Subject: [PATCH 143/336] preen --- esphome/components/esp32_camera_web_server/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/esphome/components/esp32_camera_web_server/__init__.py b/esphome/components/esp32_camera_web_server/__init__.py index 315cd649d1..ed1aaa2e07 100644 --- a/esphome/components/esp32_camera_web_server/__init__.py +++ b/esphome/components/esp32_camera_web_server/__init__.py @@ -19,8 +19,8 @@ def _consume_camera_web_server_sockets(config: ConfigType) -> ConfigType: """Register socket needs for camera web server.""" from esphome.components import socket - # Each camera web server instance needs 1 listening socket + 1-2 client connections - sockets_needed = 2 + # Each camera web server instance needs 1 listening socket + 2 client connections + sockets_needed = 3 socket.consume_sockets(sockets_needed, "esp32_camera_web_server")(config) return config From a0922bc8b0d1bf6260b946a442cfea3253a776f5 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 19 Oct 2025 08:43:38 -1000 Subject: [PATCH 144/336] [ci] Add automated memory impact analysis for pull requests (#11242) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com> --- .coveragerc | 1 + .github/workflows/ci.yml | 291 ++++++ esphome/__main__.py | 4 +- esphome/analyze_memory/__init__.py | 502 ++++++++++ esphome/analyze_memory/__main__.py | 6 + esphome/analyze_memory/cli.py | 408 ++++++++ esphome/analyze_memory/const.py | 903 ++++++++++++++++++ esphome/analyze_memory/helpers.py | 121 +++ esphome/platformio_api.py | 20 + script/analyze_component_buses.py | 14 +- script/ci_helpers.py | 23 + script/ci_memory_impact_comment.py | 570 +++++++++++ script/ci_memory_impact_extract.py | 281 ++++++ script/determine-jobs.py | 191 +++- script/helpers.py | 79 +- script/list-components.py | 10 +- script/split_components_for_ci.py | 10 +- .../ci_memory_impact_comment_template.j2 | 27 + .../ci_memory_impact_component_breakdown.j2 | 15 + script/templates/ci_memory_impact_macros.j2 | 8 + .../ci_memory_impact_symbol_changes.j2 | 51 + script/test_build_components.py | 21 +- tests/script/test_determine_jobs.py | 229 ++++- tests/unit_tests/test_platformio_api.py | 36 + 24 files changed, 3772 insertions(+), 49 deletions(-) create mode 100644 esphome/analyze_memory/__init__.py create mode 100644 esphome/analyze_memory/__main__.py create mode 100644 esphome/analyze_memory/cli.py create mode 100644 esphome/analyze_memory/const.py create mode 100644 esphome/analyze_memory/helpers.py create mode 100755 script/ci_helpers.py create mode 100755 script/ci_memory_impact_comment.py create mode 100755 script/ci_memory_impact_extract.py create mode 100644 script/templates/ci_memory_impact_comment_template.j2 create mode 100644 script/templates/ci_memory_impact_component_breakdown.j2 create mode 100644 script/templates/ci_memory_impact_macros.j2 create mode 100644 script/templates/ci_memory_impact_symbol_changes.j2 diff --git a/.coveragerc b/.coveragerc index f23592be24..c15e79a31b 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,4 +1,5 @@ [run] omit = esphome/components/* + esphome/analyze_memory/* tests/integration/* diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 87e182fe4d..42f934de9d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -175,6 +175,7 @@ jobs: changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }} directly-changed-components-with-tests: ${{ steps.determine.outputs.directly-changed-components-with-tests }} component-test-count: ${{ steps.determine.outputs.component-test-count }} + memory_impact: ${{ steps.determine.outputs.memory-impact }} steps: - name: Check out code from GitHub uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 @@ -204,6 +205,7 @@ jobs: echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT echo "directly-changed-components-with-tests=$(echo "$output" | jq -c '.directly_changed_components_with_tests')" >> $GITHUB_OUTPUT echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT + echo "memory-impact=$(echo "$output" | jq -c '.memory_impact')" >> $GITHUB_OUTPUT integration-tests: name: Run integration tests @@ -521,6 +523,292 @@ jobs: - uses: pre-commit-ci/lite-action@5d6cc0eb514c891a40562a58a8e71576c5c7fb43 # v1.1.0 if: always() + 
memory-impact-target-branch: + name: Build target branch for memory impact + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' + outputs: + ram_usage: ${{ steps.extract.outputs.ram_usage }} + flash_usage: ${{ steps.extract.outputs.flash_usage }} + cache_hit: ${{ steps.cache-memory-analysis.outputs.cache-hit }} + skip: ${{ steps.check-script.outputs.skip }} + steps: + - name: Check out target branch + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + ref: ${{ github.base_ref }} + + # Check if memory impact extraction script exists on target branch + # If not, skip the analysis (this handles older branches that don't have the feature) + - name: Check for memory impact script + id: check-script + run: | + if [ -f "script/ci_memory_impact_extract.py" ]; then + echo "skip=false" >> $GITHUB_OUTPUT + else + echo "skip=true" >> $GITHUB_OUTPUT + echo "::warning::ci_memory_impact_extract.py not found on target branch, skipping memory impact analysis" + fi + + # All remaining steps only run if script exists + - name: Generate cache key + id: cache-key + if: steps.check-script.outputs.skip != 'true' + run: | + # Get the commit SHA of the target branch + target_sha=$(git rev-parse HEAD) + + # Hash the build infrastructure files (all files that affect build/analysis) + infra_hash=$(cat \ + script/test_build_components.py \ + script/ci_memory_impact_extract.py \ + script/analyze_component_buses.py \ + script/merge_component_configs.py \ + script/ci_helpers.py \ + .github/workflows/ci.yml \ + | sha256sum | cut -d' ' -f1) + + # Get platform and components from job inputs + platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}" + components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}' + components_hash=$(echo "$components" | sha256sum | cut -d' ' -f1) + + # Combine into cache key + cache_key="memory-analysis-target-${target_sha}-${infra_hash}-${platform}-${components_hash}" + echo "cache-key=${cache_key}" >> $GITHUB_OUTPUT + echo "Cache key: ${cache_key}" + + - name: Restore cached memory analysis + id: cache-memory-analysis + if: steps.check-script.outputs.skip != 'true' + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: memory-analysis-target.json + key: ${{ steps.cache-key.outputs.cache-key }} + + - name: Cache status + if: steps.check-script.outputs.skip != 'true' + run: | + if [ "${{ steps.cache-memory-analysis.outputs.cache-hit }}" == "true" ]; then + echo "✓ Cache hit! Using cached memory analysis results." + echo " Skipping build step to save time." + else + echo "✗ Cache miss. Will build and analyze memory usage." 
+ fi + + - name: Restore Python + if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + + - name: Cache platformio + if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }} + + - name: Build, compile, and analyze memory + if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' + id: build + run: | + . venv/bin/activate + components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}' + platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}" + + echo "Building with test_build_components.py for $platform with components:" + echo "$components" | jq -r '.[]' | sed 's/^/ - /' + + # Use test_build_components.py which handles grouping automatically + # Pass components as comma-separated list + component_list=$(echo "$components" | jq -r 'join(",")') + + echo "Compiling with test_build_components.py..." + + # Run build and extract memory with auto-detection of build directory for detailed analysis + # Use tee to show output in CI while also piping to extraction script + python script/test_build_components.py \ + -e compile \ + -c "$component_list" \ + -t "$platform" 2>&1 | \ + tee /dev/stderr | \ + python script/ci_memory_impact_extract.py \ + --output-env \ + --output-json memory-analysis-target.json + + - name: Save memory analysis to cache + if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success' + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: memory-analysis-target.json + key: ${{ steps.cache-key.outputs.cache-key }} + + - name: Extract memory usage for outputs + id: extract + if: steps.check-script.outputs.skip != 'true' + run: | + if [ -f memory-analysis-target.json ]; then + ram=$(jq -r '.ram_bytes' memory-analysis-target.json) + flash=$(jq -r '.flash_bytes' memory-analysis-target.json) + echo "ram_usage=${ram}" >> $GITHUB_OUTPUT + echo "flash_usage=${flash}" >> $GITHUB_OUTPUT + echo "RAM: ${ram} bytes, Flash: ${flash} bytes" + else + echo "Error: memory-analysis-target.json not found" + exit 1 + fi + + - name: Upload memory analysis JSON + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: memory-analysis-target + path: memory-analysis-target.json + if-no-files-found: warn + retention-days: 1 + + memory-impact-pr-branch: + name: Build PR branch for memory impact + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' + outputs: + ram_usage: ${{ steps.extract.outputs.ram_usage }} + flash_usage: ${{ steps.extract.outputs.flash_usage }} + steps: + - name: Check out PR branch + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - name: Restore Python + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + - 
name: Cache platformio + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }} + - name: Build, compile, and analyze memory + id: extract + run: | + . venv/bin/activate + components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}' + platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}" + + echo "Building with test_build_components.py for $platform with components:" + echo "$components" | jq -r '.[]' | sed 's/^/ - /' + + # Use test_build_components.py which handles grouping automatically + # Pass components as comma-separated list + component_list=$(echo "$components" | jq -r 'join(",")') + + echo "Compiling with test_build_components.py..." + + # Run build and extract memory with auto-detection of build directory for detailed analysis + # Use tee to show output in CI while also piping to extraction script + python script/test_build_components.py \ + -e compile \ + -c "$component_list" \ + -t "$platform" 2>&1 | \ + tee /dev/stderr | \ + python script/ci_memory_impact_extract.py \ + --output-env \ + --output-json memory-analysis-pr.json + - name: Upload memory analysis JSON + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: memory-analysis-pr + path: memory-analysis-pr.json + if-no-files-found: warn + retention-days: 1 + + memory-impact-comment: + name: Comment memory impact + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + - memory-impact-target-branch + - memory-impact-pr-branch + if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true' + permissions: + contents: read + pull-requests: write + steps: + - name: Check out code + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - name: Restore Python + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + - name: Download target analysis JSON + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: memory-analysis-target + path: ./memory-analysis + continue-on-error: true + - name: Download PR analysis JSON + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: memory-analysis-pr + path: ./memory-analysis + continue-on-error: true + - name: Post or update PR comment + env: + GH_TOKEN: ${{ github.token }} + COMPONENTS: ${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }} + PLATFORM: ${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }} + TARGET_RAM: ${{ needs.memory-impact-target-branch.outputs.ram_usage }} + TARGET_FLASH: ${{ needs.memory-impact-target-branch.outputs.flash_usage }} + PR_RAM: ${{ needs.memory-impact-pr-branch.outputs.ram_usage }} + PR_FLASH: ${{ needs.memory-impact-pr-branch.outputs.flash_usage }} + TARGET_CACHE_HIT: ${{ needs.memory-impact-target-branch.outputs.cache_hit }} + run: | + . 
venv/bin/activate + + # Check if analysis JSON files exist + target_json_arg="" + pr_json_arg="" + + if [ -f ./memory-analysis/memory-analysis-target.json ]; then + echo "Found target analysis JSON" + target_json_arg="--target-json ./memory-analysis/memory-analysis-target.json" + else + echo "No target analysis JSON found" + fi + + if [ -f ./memory-analysis/memory-analysis-pr.json ]; then + echo "Found PR analysis JSON" + pr_json_arg="--pr-json ./memory-analysis/memory-analysis-pr.json" + else + echo "No PR analysis JSON found" + fi + + # Add cache flag if target was cached + cache_flag="" + if [ "$TARGET_CACHE_HIT" == "true" ]; then + cache_flag="--target-cache-hit" + fi + + python script/ci_memory_impact_comment.py \ + --pr-number "${{ github.event.pull_request.number }}" \ + --components "$COMPONENTS" \ + --platform "$PLATFORM" \ + --target-ram "$TARGET_RAM" \ + --target-flash "$TARGET_FLASH" \ + --pr-ram "$PR_RAM" \ + --pr-flash "$PR_FLASH" \ + $target_json_arg \ + $pr_json_arg \ + $cache_flag + ci-status: name: CI Status runs-on: ubuntu-24.04 @@ -535,6 +823,9 @@ jobs: - test-build-components-splitter - test-build-components-split - pre-commit-ci-lite + - memory-impact-target-branch + - memory-impact-pr-branch + - memory-impact-comment if: always() steps: - name: Success diff --git a/esphome/__main__.py b/esphome/__main__.py index d9bdfb175b..a0b7d16ae9 100644 --- a/esphome/__main__.py +++ b/esphome/__main__.py @@ -466,7 +466,9 @@ def write_cpp_file() -> int: def compile_program(args: ArgsProtocol, config: ConfigType) -> int: from esphome import platformio_api - _LOGGER.info("Compiling app...") + # NOTE: "Build path:" format is parsed by script/ci_memory_impact_extract.py + # If you change this format, update the regex in that script as well + _LOGGER.info("Compiling app... Build path: %s", CORE.build_path) rc = platformio_api.run_compile(config, CORE.verbose) if rc != 0: return rc diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py new file mode 100644 index 0000000000..71e86e3788 --- /dev/null +++ b/esphome/analyze_memory/__init__.py @@ -0,0 +1,502 @@ +"""Memory usage analyzer for ESPHome compiled binaries.""" + +from collections import defaultdict +from dataclasses import dataclass, field +import logging +from pathlib import Path +import re +import subprocess +from typing import TYPE_CHECKING + +from .const import ( + CORE_SUBCATEGORY_PATTERNS, + DEMANGLED_PATTERNS, + ESPHOME_COMPONENT_PATTERN, + SECTION_TO_ATTR, + SYMBOL_PATTERNS, +) +from .helpers import ( + get_component_class_patterns, + get_esphome_components, + map_section_name, + parse_symbol_line, +) + +if TYPE_CHECKING: + from esphome.platformio_api import IDEData + +_LOGGER = logging.getLogger(__name__) + +# GCC global constructor/destructor prefix annotations +_GCC_PREFIX_ANNOTATIONS = { + "_GLOBAL__sub_I_": "global constructor for", + "_GLOBAL__sub_D_": "global destructor for", +} + +# GCC optimization suffix pattern (e.g., $isra$0, $part$1, $constprop$2) +_GCC_OPTIMIZATION_SUFFIX_PATTERN = re.compile(r"(\$(?:isra|part|constprop)\$\d+)") + +# C++ runtime patterns for categorization +_CPP_RUNTIME_PATTERNS = frozenset(["vtable", "typeinfo", "thunk"]) + +# libc printf/scanf family base names (used to detect variants like _printf_r, vfprintf, etc.) 
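The two GCC-related constants defined above feed the normalization step that runs before demangling; the sketch below shows the effect on a single symbol (Python, illustration only: normalize() is an invented helper, and the actual logic lives in _batch_demangle_symbols() later in this file).

import re

# Mirrors the pattern and prefix table defined above.
_GCC_OPTIMIZATION_SUFFIX_PATTERN = re.compile(r"(\$(?:isra|part|constprop)\$\d+)")
_GCC_PREFIX_ANNOTATIONS = {
    "_GLOBAL__sub_I_": "global constructor for",
    "_GLOBAL__sub_D_": "global destructor for",
}

def normalize(symbol: str) -> tuple[str, str]:
    # Strip optimization suffixes such as $isra$0, then peel off a known GCC prefix.
    stripped = _GCC_OPTIMIZATION_SUFFIX_PATTERN.sub("", symbol)
    prefix = ""
    for gcc_prefix in _GCC_PREFIX_ANNOTATIONS:
        if stripped.startswith(gcc_prefix):
            prefix = gcc_prefix
            stripped = stripped[len(prefix):]
            break
    return prefix, stripped

# Yields ("_GLOBAL__sub_I_", "_ZN7esphome11Application5setupEv"),
# leaving a name that c++filt can demangle cleanly.
print(normalize("_GLOBAL__sub_I__ZN7esphome11Application5setupEv$isra$0"))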
+_LIBC_PRINTF_SCANF_FAMILY = frozenset(["printf", "fprintf", "sprintf", "scanf"]) + +# Regex pattern for parsing readelf section headers +# Format: [ #] name type addr off size +_READELF_SECTION_PATTERN = re.compile( + r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)" +) + +# Component category prefixes +_COMPONENT_PREFIX_ESPHOME = "[esphome]" +_COMPONENT_PREFIX_EXTERNAL = "[external]" +_COMPONENT_CORE = f"{_COMPONENT_PREFIX_ESPHOME}core" +_COMPONENT_API = f"{_COMPONENT_PREFIX_ESPHOME}api" + +# C++ namespace prefixes +_NAMESPACE_ESPHOME = "esphome::" +_NAMESPACE_STD = "std::" + +# Type alias for symbol information: (symbol_name, size, component) +SymbolInfoType = tuple[str, int, str] + + +@dataclass +class MemorySection: + """Represents a memory section with its symbols.""" + + name: str + symbols: list[SymbolInfoType] = field(default_factory=list) + total_size: int = 0 + + +@dataclass +class ComponentMemory: + """Tracks memory usage for a component.""" + + name: str + text_size: int = 0 # Code in flash + rodata_size: int = 0 # Read-only data in flash + data_size: int = 0 # Initialized data (flash + ram) + bss_size: int = 0 # Uninitialized data (ram only) + symbol_count: int = 0 + + @property + def flash_total(self) -> int: + """Total flash usage (text + rodata + data).""" + return self.text_size + self.rodata_size + self.data_size + + @property + def ram_total(self) -> int: + """Total RAM usage (data + bss).""" + return self.data_size + self.bss_size + + +class MemoryAnalyzer: + """Analyzes memory usage from ELF files.""" + + def __init__( + self, + elf_path: str, + objdump_path: str | None = None, + readelf_path: str | None = None, + external_components: set[str] | None = None, + idedata: "IDEData | None" = None, + ) -> None: + """Initialize memory analyzer. 
+ + Args: + elf_path: Path to ELF file to analyze + objdump_path: Path to objdump binary (auto-detected from idedata if not provided) + readelf_path: Path to readelf binary (auto-detected from idedata if not provided) + external_components: Set of external component names + idedata: Optional PlatformIO IDEData object to auto-detect toolchain paths + """ + self.elf_path = Path(elf_path) + if not self.elf_path.exists(): + raise FileNotFoundError(f"ELF file not found: {elf_path}") + + # Auto-detect toolchain paths from idedata if not provided + if idedata is not None and (objdump_path is None or readelf_path is None): + objdump_path = objdump_path or idedata.objdump_path + readelf_path = readelf_path or idedata.readelf_path + _LOGGER.debug("Using toolchain paths from PlatformIO idedata") + + self.objdump_path = objdump_path or "objdump" + self.readelf_path = readelf_path or "readelf" + self.external_components = external_components or set() + + self.sections: dict[str, MemorySection] = {} + self.components: dict[str, ComponentMemory] = defaultdict( + lambda: ComponentMemory("") + ) + self._demangle_cache: dict[str, str] = {} + self._uncategorized_symbols: list[tuple[str, str, int]] = [] + self._esphome_core_symbols: list[ + tuple[str, str, int] + ] = [] # Track core symbols + self._component_symbols: dict[str, list[tuple[str, str, int]]] = defaultdict( + list + ) # Track symbols for all components + + def analyze(self) -> dict[str, ComponentMemory]: + """Analyze the ELF file and return component memory usage.""" + self._parse_sections() + self._parse_symbols() + self._categorize_symbols() + return dict(self.components) + + def _parse_sections(self) -> None: + """Parse section headers from ELF file.""" + result = subprocess.run( + [self.readelf_path, "-S", str(self.elf_path)], + capture_output=True, + text=True, + check=True, + ) + + # Parse section headers + for line in result.stdout.splitlines(): + # Look for section entries + if not (match := _READELF_SECTION_PATTERN.match(line)): + continue + + section_name = match.group(1) + size_hex = match.group(2) + size = int(size_hex, 16) + + # Map to standard section name + mapped_section = map_section_name(section_name) + if not mapped_section: + continue + + if mapped_section not in self.sections: + self.sections[mapped_section] = MemorySection(mapped_section) + self.sections[mapped_section].total_size += size + + def _parse_symbols(self) -> None: + """Parse symbols from ELF file.""" + result = subprocess.run( + [self.objdump_path, "-t", str(self.elf_path)], + capture_output=True, + text=True, + check=True, + ) + + # Track seen addresses to avoid duplicates + seen_addresses: set[str] = set() + + for line in result.stdout.splitlines(): + if not (symbol_info := parse_symbol_line(line)): + continue + + section, name, size, address = symbol_info + + # Skip duplicate symbols at the same address (e.g., C1/C2 constructors) + if address in seen_addresses or section not in self.sections: + continue + + self.sections[section].symbols.append((name, size, "")) + seen_addresses.add(address) + + def _categorize_symbols(self) -> None: + """Categorize symbols by component.""" + # First, collect all unique symbol names for batch demangling + all_symbols = { + symbol_name + for section in self.sections.values() + for symbol_name, _, _ in section.symbols + } + + # Batch demangle all symbols at once + self._batch_demangle_symbols(list(all_symbols)) + + # Now categorize with cached demangled names + for section_name, section in self.sections.items(): + for 
symbol_name, size, _ in section.symbols: + component = self._identify_component(symbol_name) + + if component not in self.components: + self.components[component] = ComponentMemory(component) + + comp_mem = self.components[component] + comp_mem.symbol_count += 1 + + # Update the appropriate size attribute based on section + if attr_name := SECTION_TO_ATTR.get(section_name): + setattr(comp_mem, attr_name, getattr(comp_mem, attr_name) + size) + + # Track uncategorized symbols + if component == "other" and size > 0: + demangled = self._demangle_symbol(symbol_name) + self._uncategorized_symbols.append((symbol_name, demangled, size)) + + # Track ESPHome core symbols for detailed analysis + if component == _COMPONENT_CORE and size > 0: + demangled = self._demangle_symbol(symbol_name) + self._esphome_core_symbols.append((symbol_name, demangled, size)) + + # Track all component symbols for detailed analysis + if size > 0: + demangled = self._demangle_symbol(symbol_name) + self._component_symbols[component].append( + (symbol_name, demangled, size) + ) + + def _identify_component(self, symbol_name: str) -> str: + """Identify which component a symbol belongs to.""" + # Demangle C++ names if needed + demangled = self._demangle_symbol(symbol_name) + + # Check for special component classes first (before namespace pattern) + # This handles cases like esphome::ESPHomeOTAComponent which should map to ota + if _NAMESPACE_ESPHOME in demangled: + # Check for special component classes that include component name in the class + # For example: esphome::ESPHomeOTAComponent -> ota component + for component_name in get_esphome_components(): + patterns = get_component_class_patterns(component_name) + if any(pattern in demangled for pattern in patterns): + return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}" + + # Check for ESPHome component namespaces + match = ESPHOME_COMPONENT_PATTERN.search(demangled) + if match: + component_name = match.group(1) + # Strip trailing underscore if present (e.g., switch_ -> switch) + component_name = component_name.rstrip("_") + + # Check if this is an actual component in the components directory + if component_name in get_esphome_components(): + return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}" + # Check if this is a known external component from the config + if component_name in self.external_components: + return f"{_COMPONENT_PREFIX_EXTERNAL}{component_name}" + # Everything else in esphome:: namespace is core + return _COMPONENT_CORE + + # Check for esphome core namespace (no component namespace) + if _NAMESPACE_ESPHOME in demangled: + # If no component match found, it's core + return _COMPONENT_CORE + + # Check against symbol patterns + for component, patterns in SYMBOL_PATTERNS.items(): + if any(pattern in symbol_name for pattern in patterns): + return component + + # Check against demangled patterns + for component, patterns in DEMANGLED_PATTERNS.items(): + if any(pattern in demangled for pattern in patterns): + return component + + # Special cases that need more complex logic + + # Check if spi_flash vs spi_driver + if "spi_" in symbol_name or "SPI" in symbol_name: + return "spi_flash" if "spi_flash" in symbol_name else "spi_driver" + + # libc special printf variants + if ( + symbol_name.startswith("_") + and symbol_name[1:].replace("_r", "").replace("v", "").replace("s", "") + in _LIBC_PRINTF_SCANF_FAMILY + ): + return "libc" + + # Track uncategorized symbols for analysis + return "other" + + def _batch_demangle_symbols(self, symbols: list[str]) -> None: + """Batch 
demangle C++ symbol names for efficiency.""" + if not symbols: + return + + # Try to find the appropriate c++filt for the platform + cppfilt_cmd = "c++filt" + + _LOGGER.info("Demangling %d symbols", len(symbols)) + _LOGGER.debug("objdump_path = %s", self.objdump_path) + + # Check if we have a toolchain-specific c++filt + if self.objdump_path and self.objdump_path != "objdump": + # Replace objdump with c++filt in the path + potential_cppfilt = self.objdump_path.replace("objdump", "c++filt") + _LOGGER.info("Checking for toolchain c++filt at: %s", potential_cppfilt) + if Path(potential_cppfilt).exists(): + cppfilt_cmd = potential_cppfilt + _LOGGER.info("✓ Using toolchain c++filt: %s", cppfilt_cmd) + else: + _LOGGER.info( + "✗ Toolchain c++filt not found at %s, using system c++filt", + potential_cppfilt, + ) + else: + _LOGGER.info("✗ Using system c++filt (objdump_path=%s)", self.objdump_path) + + # Strip GCC optimization suffixes and prefixes before demangling + # Suffixes like $isra$0, $part$0, $constprop$0 confuse c++filt + # Prefixes like _GLOBAL__sub_I_ need to be removed and tracked + symbols_stripped: list[str] = [] + symbols_prefixes: list[str] = [] # Track removed prefixes + for symbol in symbols: + # Remove GCC optimization markers + stripped = _GCC_OPTIMIZATION_SUFFIX_PATTERN.sub("", symbol) + + # Handle GCC global constructor/initializer prefixes + # _GLOBAL__sub_I_ -> extract for demangling + prefix = "" + for gcc_prefix in _GCC_PREFIX_ANNOTATIONS: + if stripped.startswith(gcc_prefix): + prefix = gcc_prefix + stripped = stripped[len(prefix) :] + break + + symbols_stripped.append(stripped) + symbols_prefixes.append(prefix) + + try: + # Send all symbols to c++filt at once + result = subprocess.run( + [cppfilt_cmd], + input="\n".join(symbols_stripped), + capture_output=True, + text=True, + check=False, + ) + except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e: + # On error, cache originals + _LOGGER.warning("Failed to batch demangle symbols: %s", e) + for symbol in symbols: + self._demangle_cache[symbol] = symbol + return + + if result.returncode != 0: + _LOGGER.warning( + "c++filt exited with code %d: %s", + result.returncode, + result.stderr[:200] if result.stderr else "(no error output)", + ) + # Cache originals on failure + for symbol in symbols: + self._demangle_cache[symbol] = symbol + return + + # Process demangled output + self._process_demangled_output( + symbols, symbols_stripped, symbols_prefixes, result.stdout, cppfilt_cmd + ) + + def _process_demangled_output( + self, + symbols: list[str], + symbols_stripped: list[str], + symbols_prefixes: list[str], + demangled_output: str, + cppfilt_cmd: str, + ) -> None: + """Process demangled symbol output and populate cache. 
+ + Args: + symbols: Original symbol names + symbols_stripped: Stripped symbol names sent to c++filt + symbols_prefixes: Removed prefixes to restore + demangled_output: Output from c++filt + cppfilt_cmd: Path to c++filt command (for logging) + """ + demangled_lines = demangled_output.strip().split("\n") + failed_count = 0 + + for original, stripped, prefix, demangled in zip( + symbols, symbols_stripped, symbols_prefixes, demangled_lines + ): + # Add back any prefix that was removed + demangled = self._restore_symbol_prefix(prefix, stripped, demangled) + + # If we stripped a suffix, add it back to the demangled name for clarity + if original != stripped and not prefix: + demangled = self._restore_symbol_suffix(original, demangled) + + self._demangle_cache[original] = demangled + + # Log symbols that failed to demangle (stayed the same as stripped version) + if stripped == demangled and stripped.startswith("_Z"): + failed_count += 1 + if failed_count <= 5: # Only log first 5 failures + _LOGGER.warning("Failed to demangle: %s", original) + + if failed_count == 0: + _LOGGER.info("Successfully demangled all %d symbols", len(symbols)) + return + + _LOGGER.warning( + "Failed to demangle %d/%d symbols using %s", + failed_count, + len(symbols), + cppfilt_cmd, + ) + + @staticmethod + def _restore_symbol_prefix(prefix: str, stripped: str, demangled: str) -> str: + """Restore prefix that was removed before demangling. + + Args: + prefix: Prefix that was removed (e.g., "_GLOBAL__sub_I_") + stripped: Stripped symbol name + demangled: Demangled symbol name + + Returns: + Demangled name with prefix restored/annotated + """ + if not prefix: + return demangled + + # Successfully demangled - add descriptive prefix + if demangled != stripped and ( + annotation := _GCC_PREFIX_ANNOTATIONS.get(prefix) + ): + return f"[{annotation}: {demangled}]" + + # Failed to demangle - restore original prefix + return prefix + demangled + + @staticmethod + def _restore_symbol_suffix(original: str, demangled: str) -> str: + """Restore GCC optimization suffix that was removed before demangling. 
+ + Args: + original: Original symbol name with suffix + demangled: Demangled symbol name without suffix + + Returns: + Demangled name with suffix annotation + """ + if suffix_match := _GCC_OPTIMIZATION_SUFFIX_PATTERN.search(original): + return f"{demangled} [{suffix_match.group(1)}]" + return demangled + + def _demangle_symbol(self, symbol: str) -> str: + """Get demangled C++ symbol name from cache.""" + return self._demangle_cache.get(symbol, symbol) + + def _categorize_esphome_core_symbol(self, demangled: str) -> str: + """Categorize ESPHome core symbols into subcategories.""" + # Special patterns that need to be checked separately + if any(pattern in demangled for pattern in _CPP_RUNTIME_PATTERNS): + return "C++ Runtime (vtables/RTTI)" + + if demangled.startswith(_NAMESPACE_STD): + return "C++ STL" + + # Check against patterns from const.py + for category, patterns in CORE_SUBCATEGORY_PATTERNS.items(): + if any(pattern in demangled for pattern in patterns): + return category + + return "Other Core" + + +if __name__ == "__main__": + from .cli import main + + main() diff --git a/esphome/analyze_memory/__main__.py b/esphome/analyze_memory/__main__.py new file mode 100644 index 0000000000..aa772c3ad4 --- /dev/null +++ b/esphome/analyze_memory/__main__.py @@ -0,0 +1,6 @@ +"""Main entry point for running the memory analyzer as a module.""" + +from .cli import main + +if __name__ == "__main__": + main() diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py new file mode 100644 index 0000000000..1695a00c19 --- /dev/null +++ b/esphome/analyze_memory/cli.py @@ -0,0 +1,408 @@ +"""CLI interface for memory analysis with report generation.""" + +from collections import defaultdict +import sys + +from . import ( + _COMPONENT_API, + _COMPONENT_CORE, + _COMPONENT_PREFIX_ESPHOME, + _COMPONENT_PREFIX_EXTERNAL, + MemoryAnalyzer, +) + + +class MemoryAnalyzerCLI(MemoryAnalyzer): + """Memory analyzer with CLI-specific report generation.""" + + # Column width constants + COL_COMPONENT: int = 29 + COL_FLASH_TEXT: int = 14 + COL_FLASH_DATA: int = 14 + COL_RAM_DATA: int = 12 + COL_RAM_BSS: int = 12 + COL_TOTAL_FLASH: int = 15 + COL_TOTAL_RAM: int = 12 + COL_SEPARATOR: int = 3 # " | " + + # Core analysis column widths + COL_CORE_SUBCATEGORY: int = 30 + COL_CORE_SIZE: int = 12 + COL_CORE_COUNT: int = 6 + COL_CORE_PERCENT: int = 10 + + # Calculate table width once at class level + TABLE_WIDTH: int = ( + COL_COMPONENT + + COL_SEPARATOR + + COL_FLASH_TEXT + + COL_SEPARATOR + + COL_FLASH_DATA + + COL_SEPARATOR + + COL_RAM_DATA + + COL_SEPARATOR + + COL_RAM_BSS + + COL_SEPARATOR + + COL_TOTAL_FLASH + + COL_SEPARATOR + + COL_TOTAL_RAM + ) + + @staticmethod + def _make_separator_line(*widths: int) -> str: + """Create a separator line with given column widths. 
+ + Args: + widths: Column widths to create separators for + + Returns: + Separator line like "----+---------+-----" + """ + return "-+-".join("-" * width for width in widths) + + # Pre-computed separator lines + MAIN_TABLE_SEPARATOR: str = _make_separator_line( + COL_COMPONENT, + COL_FLASH_TEXT, + COL_FLASH_DATA, + COL_RAM_DATA, + COL_RAM_BSS, + COL_TOTAL_FLASH, + COL_TOTAL_RAM, + ) + + CORE_TABLE_SEPARATOR: str = _make_separator_line( + COL_CORE_SUBCATEGORY, + COL_CORE_SIZE, + COL_CORE_COUNT, + COL_CORE_PERCENT, + ) + + def generate_report(self, detailed: bool = False) -> str: + """Generate a formatted memory report.""" + components = sorted( + self.components.items(), key=lambda x: x[1].flash_total, reverse=True + ) + + # Calculate totals + total_flash = sum(c.flash_total for _, c in components) + total_ram = sum(c.ram_total for _, c in components) + + # Build report + lines: list[str] = [] + + lines.append("=" * self.TABLE_WIDTH) + lines.append("Component Memory Analysis".center(self.TABLE_WIDTH)) + lines.append("=" * self.TABLE_WIDTH) + lines.append("") + + # Main table - fixed column widths + lines.append( + f"{'Component':<{self.COL_COMPONENT}} | {'Flash (text)':>{self.COL_FLASH_TEXT}} | {'Flash (data)':>{self.COL_FLASH_DATA}} | {'RAM (data)':>{self.COL_RAM_DATA}} | {'RAM (bss)':>{self.COL_RAM_BSS}} | {'Total Flash':>{self.COL_TOTAL_FLASH}} | {'Total RAM':>{self.COL_TOTAL_RAM}}" + ) + lines.append(self.MAIN_TABLE_SEPARATOR) + + for name, mem in components: + if mem.flash_total > 0 or mem.ram_total > 0: + flash_rodata = mem.rodata_size + mem.data_size + lines.append( + f"{name:<{self.COL_COMPONENT}} | {mem.text_size:>{self.COL_FLASH_TEXT - 2},} B | {flash_rodata:>{self.COL_FLASH_DATA - 2},} B | " + f"{mem.data_size:>{self.COL_RAM_DATA - 2},} B | {mem.bss_size:>{self.COL_RAM_BSS - 2},} B | " + f"{mem.flash_total:>{self.COL_TOTAL_FLASH - 2},} B | {mem.ram_total:>{self.COL_TOTAL_RAM - 2},} B" + ) + + lines.append(self.MAIN_TABLE_SEPARATOR) + lines.append( + f"{'TOTAL':<{self.COL_COMPONENT}} | {' ':>{self.COL_FLASH_TEXT}} | {' ':>{self.COL_FLASH_DATA}} | " + f"{' ':>{self.COL_RAM_DATA}} | {' ':>{self.COL_RAM_BSS}} | " + f"{total_flash:>{self.COL_TOTAL_FLASH - 2},} B | {total_ram:>{self.COL_TOTAL_RAM - 2},} B" + ) + + # Top consumers + lines.append("") + lines.append("Top Flash Consumers:") + for i, (name, mem) in enumerate(components[:25]): + if mem.flash_total > 0: + percentage = ( + (mem.flash_total / total_flash * 100) if total_flash > 0 else 0 + ) + lines.append( + f"{i + 1}. {name} ({mem.flash_total:,} B) - {percentage:.1f}% of analyzed flash" + ) + + lines.append("") + lines.append("Top RAM Consumers:") + ram_components = sorted(components, key=lambda x: x[1].ram_total, reverse=True) + for i, (name, mem) in enumerate(ram_components[:25]): + if mem.ram_total > 0: + percentage = (mem.ram_total / total_ram * 100) if total_ram > 0 else 0 + lines.append( + f"{i + 1}. {name} ({mem.ram_total:,} B) - {percentage:.1f}% of analyzed RAM" + ) + + lines.append("") + lines.append( + "Note: This analysis covers symbols in the ELF file. Some runtime allocations may not be included." 
+ ) + lines.append("=" * self.TABLE_WIDTH) + + # Add ESPHome core detailed analysis if there are core symbols + if self._esphome_core_symbols: + lines.append("") + lines.append("=" * self.TABLE_WIDTH) + lines.append( + f"{_COMPONENT_CORE} Detailed Analysis".center(self.TABLE_WIDTH) + ) + lines.append("=" * self.TABLE_WIDTH) + lines.append("") + + # Group core symbols by subcategory + core_subcategories: dict[str, list[tuple[str, str, int]]] = defaultdict( + list + ) + + for symbol, demangled, size in self._esphome_core_symbols: + # Categorize based on demangled name patterns + subcategory = self._categorize_esphome_core_symbol(demangled) + core_subcategories[subcategory].append((symbol, demangled, size)) + + # Sort subcategories by total size + sorted_subcategories = sorted( + [ + (name, symbols, sum(s[2] for s in symbols)) + for name, symbols in core_subcategories.items() + ], + key=lambda x: x[2], + reverse=True, + ) + + lines.append( + f"{'Subcategory':<{self.COL_CORE_SUBCATEGORY}} | {'Size':>{self.COL_CORE_SIZE}} | " + f"{'Count':>{self.COL_CORE_COUNT}} | {'% of Core':>{self.COL_CORE_PERCENT}}" + ) + lines.append(self.CORE_TABLE_SEPARATOR) + + core_total = sum(size for _, _, size in self._esphome_core_symbols) + + for subcategory, symbols, total_size in sorted_subcategories: + percentage = (total_size / core_total * 100) if core_total > 0 else 0 + lines.append( + f"{subcategory:<{self.COL_CORE_SUBCATEGORY}} | {total_size:>{self.COL_CORE_SIZE - 2},} B | " + f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%" + ) + + # Top 15 largest core symbols + lines.append("") + lines.append(f"Top 15 Largest {_COMPONENT_CORE} Symbols:") + sorted_core_symbols = sorted( + self._esphome_core_symbols, key=lambda x: x[2], reverse=True + ) + + for i, (symbol, demangled, size) in enumerate(sorted_core_symbols[:15]): + lines.append(f"{i + 1}. 
{demangled} ({size:,} B)") + + lines.append("=" * self.TABLE_WIDTH) + + # Add detailed analysis for top ESPHome and external components + esphome_components = [ + (name, mem) + for name, mem in components + if name.startswith(_COMPONENT_PREFIX_ESPHOME) and name != _COMPONENT_CORE + ] + external_components = [ + (name, mem) + for name, mem in components + if name.startswith(_COMPONENT_PREFIX_EXTERNAL) + ] + + top_esphome_components = sorted( + esphome_components, key=lambda x: x[1].flash_total, reverse=True + )[:30] + + # Include all external components (they're usually important) + top_external_components = sorted( + external_components, key=lambda x: x[1].flash_total, reverse=True + ) + + # Check if API component exists and ensure it's included + api_component = None + for name, mem in components: + if name == _COMPONENT_API: + api_component = (name, mem) + break + + # Combine all components to analyze: top ESPHome + all external + API if not already included + components_to_analyze = list(top_esphome_components) + list( + top_external_components + ) + if api_component and api_component not in components_to_analyze: + components_to_analyze.append(api_component) + + if components_to_analyze: + for comp_name, comp_mem in components_to_analyze: + if not (comp_symbols := self._component_symbols.get(comp_name, [])): + continue + lines.append("") + lines.append("=" * self.TABLE_WIDTH) + lines.append(f"{comp_name} Detailed Analysis".center(self.TABLE_WIDTH)) + lines.append("=" * self.TABLE_WIDTH) + lines.append("") + + # Sort symbols by size + sorted_symbols = sorted(comp_symbols, key=lambda x: x[2], reverse=True) + + lines.append(f"Total symbols: {len(sorted_symbols)}") + lines.append(f"Total size: {comp_mem.flash_total:,} B") + lines.append("") + + # Show all symbols > 100 bytes for better visibility + large_symbols = [ + (sym, dem, size) for sym, dem, size in sorted_symbols if size > 100 + ] + + lines.append( + f"{comp_name} Symbols > 100 B ({len(large_symbols)} symbols):" + ) + for i, (symbol, demangled, size) in enumerate(large_symbols): + lines.append(f"{i + 1}. {demangled} ({size:,} B)") + + lines.append("=" * self.TABLE_WIDTH) + + return "\n".join(lines) + + def dump_uncategorized_symbols(self, output_file: str | None = None) -> None: + """Dump uncategorized symbols for analysis.""" + # Sort by size descending + sorted_symbols = sorted( + self._uncategorized_symbols, key=lambda x: x[2], reverse=True + ) + + lines = ["Uncategorized Symbols Analysis", "=" * 80] + lines.append(f"Total uncategorized symbols: {len(sorted_symbols)}") + lines.append( + f"Total uncategorized size: {sum(s[2] for s in sorted_symbols):,} bytes" + ) + lines.append("") + lines.append(f"{'Size':>10} | {'Symbol':<60} | Demangled") + lines.append("-" * 10 + "-+-" + "-" * 60 + "-+-" + "-" * 40) + + for symbol, demangled, size in sorted_symbols[:100]: # Top 100 + demangled_display = ( + demangled[:100] if symbol != demangled else "[not demangled]" + ) + lines.append(f"{size:>10,} | {symbol[:60]:<60} | {demangled_display}") + + if len(sorted_symbols) > 100: + lines.append(f"\n... 
and {len(sorted_symbols) - 100} more symbols") + + content = "\n".join(lines) + + if output_file: + with open(output_file, "w", encoding="utf-8") as f: + f.write(content) + else: + print(content) + + +def analyze_elf( + elf_path: str, + objdump_path: str | None = None, + readelf_path: str | None = None, + detailed: bool = False, + external_components: set[str] | None = None, +) -> str: + """Analyze an ELF file and return a memory report.""" + analyzer = MemoryAnalyzerCLI( + elf_path, objdump_path, readelf_path, external_components + ) + analyzer.analyze() + return analyzer.generate_report(detailed) + + +def main(): + """CLI entrypoint for memory analysis.""" + if len(sys.argv) < 2: + print("Usage: python -m esphome.analyze_memory ") + print("\nAnalyze memory usage from an ESPHome build directory.") + print("The build directory should contain firmware.elf and idedata will be") + print("loaded from ~/.esphome/.internal/idedata/.json") + print("\nExamples:") + print(" python -m esphome.analyze_memory ~/.esphome/build/my-device") + print(" python -m esphome.analyze_memory .esphome/build/my-device") + print(" python -m esphome.analyze_memory my-device # Short form") + sys.exit(1) + + build_dir = sys.argv[1] + + # Load build directory + import json + from pathlib import Path + + from esphome.platformio_api import IDEData + + build_path = Path(build_dir) + + # If no path separator in name, assume it's a device name + if "/" not in build_dir and not build_path.is_dir(): + # Try current directory first + cwd_path = Path.cwd() / ".esphome" / "build" / build_dir + if cwd_path.is_dir(): + build_path = cwd_path + print(f"Using build directory: {build_path}", file=sys.stderr) + else: + # Fall back to home directory + build_path = Path.home() / ".esphome" / "build" / build_dir + print(f"Using build directory: {build_path}", file=sys.stderr) + + if not build_path.is_dir(): + print(f"Error: {build_path} is not a directory", file=sys.stderr) + sys.exit(1) + + # Find firmware.elf + elf_file = None + for elf_candidate in [ + build_path / "firmware.elf", + build_path / ".pioenvs" / build_path.name / "firmware.elf", + ]: + if elf_candidate.exists(): + elf_file = str(elf_candidate) + break + + if not elf_file: + print(f"Error: firmware.elf not found in {build_dir}", file=sys.stderr) + sys.exit(1) + + # Find idedata.json - check current directory first, then home + device_name = build_path.name + idedata_candidates = [ + Path.cwd() / ".esphome" / "idedata" / f"{device_name}.json", + Path.home() / ".esphome" / "idedata" / f"{device_name}.json", + ] + + idedata = None + for idedata_path in idedata_candidates: + if not idedata_path.exists(): + continue + try: + with open(idedata_path, encoding="utf-8") as f: + raw_data = json.load(f) + idedata = IDEData(raw_data) + print(f"Loaded idedata from: {idedata_path}", file=sys.stderr) + break + except (json.JSONDecodeError, OSError) as e: + print(f"Warning: Failed to load idedata: {e}", file=sys.stderr) + + if not idedata: + print( + f"Warning: idedata not found (searched {idedata_candidates[0]} and {idedata_candidates[1]})", + file=sys.stderr, + ) + + analyzer = MemoryAnalyzerCLI(elf_file, idedata=idedata) + analyzer.analyze() + report = analyzer.generate_report() + print(report) + + +if __name__ == "__main__": + main() diff --git a/esphome/analyze_memory/const.py b/esphome/analyze_memory/const.py new file mode 100644 index 0000000000..c60b70aeec --- /dev/null +++ b/esphome/analyze_memory/const.py @@ -0,0 +1,903 @@ +"""Constants for memory analysis symbol pattern 
matching.""" + +import re + +# Pattern to extract ESPHome component namespaces dynamically +ESPHOME_COMPONENT_PATTERN = re.compile(r"esphome::([a-zA-Z0-9_]+)::") + +# Section mapping for ELF file sections +# Maps standard section names to their various platform-specific variants +SECTION_MAPPING = { + ".text": frozenset([".text", ".iram"]), + ".rodata": frozenset([".rodata"]), + ".data": frozenset([".data", ".dram"]), + ".bss": frozenset([".bss"]), +} + +# Section to ComponentMemory attribute mapping +# Maps section names to the attribute name in ComponentMemory dataclass +SECTION_TO_ATTR = { + ".text": "text_size", + ".rodata": "rodata_size", + ".data": "data_size", + ".bss": "bss_size", +} + +# Component identification rules +# Symbol patterns: patterns found in raw symbol names +SYMBOL_PATTERNS = { + "freertos": [ + "vTask", + "xTask", + "xQueue", + "pvPort", + "vPort", + "uxTask", + "pcTask", + "prvTimerTask", + "prvAddNewTaskToReadyList", + "pxReadyTasksLists", + "prvAddCurrentTaskToDelayedList", + "xEventGroupWaitBits", + "xRingbufferSendFromISR", + "prvSendItemDoneNoSplit", + "prvReceiveGeneric", + "prvSendAcquireGeneric", + "prvCopyItemAllowSplit", + "xEventGroup", + "xRingbuffer", + "prvSend", + "prvReceive", + "prvCopy", + "xPort", + "ulTaskGenericNotifyTake", + "prvIdleTask", + "prvInitialiseNewTask", + "prvIsYieldRequiredSMP", + "prvGetItemByteBuf", + "prvInitializeNewRingbuffer", + "prvAcquireItemNoSplit", + "prvNotifyQueueSetContainer", + "ucStaticTimerQueueStorage", + "eTaskGetState", + "main_task", + "do_system_init_fn", + "xSemaphoreCreateGenericWithCaps", + "vListInsert", + "uxListRemove", + "vRingbufferReturnItem", + "vRingbufferReturnItemFromISR", + "prvCheckItemFitsByteBuffer", + "prvGetCurMaxSizeAllowSplit", + "tick_hook", + "sys_sem_new", + "sys_arch_mbox_fetch", + "sys_arch_sem_wait", + "prvDeleteTCB", + "vQueueDeleteWithCaps", + "vRingbufferDeleteWithCaps", + "vSemaphoreDeleteWithCaps", + "prvCheckItemAvail", + "prvCheckTaskCanBeScheduledSMP", + "prvGetCurMaxSizeNoSplit", + "prvResetNextTaskUnblockTime", + "prvReturnItemByteBuf", + "vApplicationStackOverflowHook", + "vApplicationGetIdleTaskMemory", + "sys_init", + "sys_mbox_new", + "sys_arch_mbox_tryfetch", + ], + "xtensa": ["xt_", "_xt_", "xPortEnterCriticalTimeout"], + "heap": ["heap_", "multi_heap"], + "spi_flash": ["spi_flash"], + "rtc": ["rtc_", "rtcio_ll_"], + "gpio_driver": ["gpio_", "pins"], + "uart_driver": ["uart", "_uart", "UART"], + "timer": ["timer_", "esp_timer"], + "peripherals": ["periph_", "periman"], + "network_stack": [ + "vj_compress", + "raw_sendto", + "raw_input", + "etharp_", + "icmp_input", + "socket_ipv6", + "ip_napt", + "socket_ipv4_multicast", + "socket_ipv6_multicast", + "netconn_", + "recv_raw", + "accept_function", + "netconn_recv_data", + "netconn_accept", + "netconn_write_vectors_partly", + "netconn_drain", + "raw_connect", + "raw_bind", + "icmp_send_response", + "sockets", + "icmp_dest_unreach", + "inet_chksum_pseudo", + "alloc_socket", + "done_socket", + "set_global_fd_sets", + "inet_chksum_pbuf", + "tryget_socket_unconn_locked", + "tryget_socket_unconn", + "cs_create_ctrl_sock", + "netbuf_alloc", + ], + "ipv6_stack": ["nd6_", "ip6_", "mld6_", "icmp6_", "icmp6_input"], + "wifi_stack": [ + "ieee80211", + "hostap", + "sta_", + "ap_", + "scan_", + "wifi_", + "wpa_", + "wps_", + "esp_wifi", + "cnx_", + "wpa3_", + "sae_", + "wDev_", + "ic_", + "mac_", + "esf_buf", + "gWpaSm", + "sm_WPA", + "eapol_", + "owe_", + "wifiLowLevelInit", + "s_do_mapping", + "gScanStruct", + "ppSearchTxframe", 
+ "ppMapWaitTxq", + "ppFillAMPDUBar", + "ppCheckTxConnTrafficIdle", + "ppCalTkipMic", + ], + "bluetooth": ["bt_", "ble_", "l2c_", "gatt_", "gap_", "hci_", "BT_init"], + "wifi_bt_coex": ["coex"], + "bluetooth_rom": ["r_ble", "r_lld", "r_llc", "r_llm"], + "bluedroid_bt": [ + "bluedroid", + "btc_", + "bta_", + "btm_", + "btu_", + "BTM_", + "GATT", + "L2CA_", + "smp_", + "gatts_", + "attp_", + "l2cu_", + "l2cb", + "smp_cb", + "BTA_GATTC_", + "SMP_", + "BTU_", + "BTA_Dm", + "GAP_Ble", + "BT_tx_if", + "host_recv_pkt_cb", + "saved_local_oob_data", + "string_to_bdaddr", + "string_is_bdaddr", + "CalConnectParamTimeout", + "transmit_fragment", + "transmit_data", + "event_command_ready", + "read_command_complete_header", + "parse_read_local_extended_features_response", + "parse_read_local_version_info_response", + "should_request_high", + "btdm_wakeup_request", + "BTA_SetAttributeValue", + "BTA_EnableBluetooth", + "transmit_command_futured", + "transmit_command", + "get_waiting_command", + "make_command", + "transmit_downward", + "host_recv_adv_packet", + "copy_extra_byte_in_db", + "parse_read_local_supported_commands_response", + ], + "crypto_math": [ + "ecp_", + "bignum_", + "mpi_", + "sswu", + "modp", + "dragonfly_", + "gcm_mult", + "__multiply", + "quorem", + "__mdiff", + "__lshift", + "__mprec_tens", + "ECC_", + "multiprecision_", + "mix_sub_columns", + "sbox", + "gfm2_sbox", + "gfm3_sbox", + "curve_p256", + "curve", + "p_256_init_curve", + "shift_sub_rows", + "rshift", + ], + "hw_crypto": ["esp_aes", "esp_sha", "esp_rsa", "esp_bignum", "esp_mpi"], + "libc": [ + "printf", + "scanf", + "malloc", + "free", + "memcpy", + "memset", + "strcpy", + "strlen", + "_dtoa", + "_fopen", + "__sfvwrite_r", + "qsort", + "__sf", + "__sflush_r", + "__srefill_r", + "_impure_data", + "_reclaim_reent", + "_open_r", + "strncpy", + "_strtod_l", + "__gethex", + "__hexnan", + "_setenv_r", + "_tzset_unlocked_r", + "__tzcalc_limits", + "select", + "scalbnf", + "strtof", + "strtof_l", + "__d2b", + "__b2d", + "__s2b", + "_Balloc", + "__multadd", + "__lo0bits", + "__atexit0", + "__smakebuf_r", + "__swhatbuf_r", + "_sungetc_r", + "_close_r", + "_link_r", + "_unsetenv_r", + "_rename_r", + "__month_lengths", + "tzinfo", + "__ratio", + "__hi0bits", + "__ulp", + "__any_on", + "__copybits", + "L_shift", + "_fcntl_r", + "_lseek_r", + "_read_r", + "_write_r", + "_unlink_r", + "_fstat_r", + "access", + "fsync", + "tcsetattr", + "tcgetattr", + "tcflush", + "tcdrain", + "__ssrefill_r", + "_stat_r", + "__hexdig_fun", + "__mcmp", + "_fwalk_sglue", + "__fpclassifyf", + "_setlocale_r", + "_mbrtowc_r", + "fcntl", + "__match", + "_lock_close", + "__c$", + "__func__$", + "__FUNCTION__$", + "DAYS_IN_MONTH", + "_DAYS_BEFORE_MONTH", + "CSWTCH$", + "dst$", + "sulp", + ], + "string_ops": ["strcmp", "strncmp", "strchr", "strstr", "strtok", "strdup"], + "memory_alloc": ["malloc", "calloc", "realloc", "free", "_sbrk"], + "file_io": [ + "fread", + "fwrite", + "fopen", + "fclose", + "fseek", + "ftell", + "fflush", + "s_fd_table", + ], + "string_formatting": [ + "snprintf", + "vsnprintf", + "sprintf", + "vsprintf", + "sscanf", + "vsscanf", + ], + "cpp_anonymous": ["_GLOBAL__N_", "n$"], + "cpp_runtime": ["__cxx", "_ZN", "_ZL", "_ZSt", "__gxx_personality", "_Z16"], + "exception_handling": ["__cxa_", "_Unwind_", "__gcc_personality", "uw_frame_state"], + "static_init": ["_GLOBAL__sub_I_"], + "mdns_lib": ["mdns"], + "phy_radio": [ + "phy_", + "rf_", + "chip_", + "register_chipv7", + "pbus_", + "bb_", + "fe_", + "rfcal_", + "ram_rfcal", + "tx_pwctrl", + 
"rx_chan", + "set_rx_gain", + "set_chan", + "agc_reg", + "ram_txiq", + "ram_txdc", + "ram_gen_rx_gain", + "rx_11b_opt", + "set_rx_sense", + "set_rx_gain_cal", + "set_chan_dig_gain", + "tx_pwctrl_init_cal", + "rfcal_txiq", + "set_tx_gain_table", + "correct_rfpll_offset", + "pll_correct_dcap", + "txiq_cal_init", + "pwdet_sar", + "pwdet_sar2_init", + "ram_iq_est_enable", + "ram_rfpll_set_freq", + "ant_wifirx_cfg", + "ant_btrx_cfg", + "force_txrxoff", + "force_txrx_off", + "tx_paon_set", + "opt_11b_resart", + "rfpll_1p2_opt", + "ram_dc_iq_est", + "ram_start_tx_tone", + "ram_en_pwdet", + "ram_cbw2040_cfg", + "rxdc_est_min", + "i2cmst_reg_init", + "temprature_sens_read", + "ram_restart_cal", + "ram_write_gain_mem", + "ram_wait_rfpll_cal_end", + "txcal_debuge_mode", + "ant_wifitx_cfg", + "reg_init_begin", + ], + "wifi_phy_pp": ["pp_", "ppT", "ppR", "ppP", "ppInstall", "ppCalTxAMPDULength"], + "wifi_lmac": ["lmac"], + "wifi_device": ["wdev", "wDev_"], + "power_mgmt": [ + "pm_", + "sleep", + "rtc_sleep", + "light_sleep", + "deep_sleep", + "power_down", + "g_pm", + ], + "memory_mgmt": [ + "mem_", + "memory_", + "tlsf_", + "memp_", + "pbuf_", + "pbuf_alloc", + "pbuf_copy_partial_pbuf", + ], + "hal_layer": ["hal_"], + "clock_mgmt": [ + "clk_", + "clock_", + "rtc_clk", + "apb_", + "cpu_freq", + "setCpuFrequencyMhz", + ], + "cache_mgmt": ["cache"], + "flash_ops": ["flash", "image_load"], + "interrupt_handlers": [ + "isr", + "interrupt", + "intr_", + "exc_", + "exception", + "port_IntStack", + ], + "wrapper_functions": ["_wrapper"], + "error_handling": ["panic", "abort", "assert", "error_", "fault"], + "authentication": ["auth"], + "ppp_protocol": ["ppp", "ipcp_", "lcp_", "chap_", "LcpEchoCheck"], + "dhcp": ["dhcp", "handle_dhcp"], + "ethernet_phy": [ + "emac_", + "eth_phy_", + "phy_tlk110", + "phy_lan87", + "phy_ip101", + "phy_rtl", + "phy_dp83", + "phy_ksz", + "lan87xx_", + "rtl8201_", + "ip101_", + "ksz80xx_", + "jl1101_", + "dp83848_", + "eth_on_state_changed", + ], + "threading": ["pthread_", "thread_", "_task_"], + "pthread": ["pthread"], + "synchronization": ["mutex", "semaphore", "spinlock", "portMUX"], + "math_lib": [ + "sin", + "cos", + "tan", + "sqrt", + "pow", + "exp", + "log", + "atan", + "asin", + "acos", + "floor", + "ceil", + "fabs", + "round", + ], + "random": ["rand", "random", "rng_", "prng"], + "time_lib": [ + "time", + "clock", + "gettimeofday", + "settimeofday", + "localtime", + "gmtime", + "mktime", + "strftime", + ], + "console_io": ["console_", "uart_tx", "uart_rx", "puts", "putchar", "getchar"], + "rom_functions": ["r_", "rom_"], + "compiler_runtime": [ + "__divdi3", + "__udivdi3", + "__moddi3", + "__muldi3", + "__ashldi3", + "__ashrdi3", + "__lshrdi3", + "__cmpdi2", + "__fixdfdi", + "__floatdidf", + ], + "libgcc": ["libgcc", "_divdi3", "_udivdi3"], + "boot_startup": ["boot", "start_cpu", "call_start", "startup", "bootloader"], + "bootloader": ["bootloader_", "esp_bootloader"], + "app_framework": ["app_", "initArduino", "setup", "loop", "Update"], + "weak_symbols": ["__weak_"], + "compiler_builtins": ["__builtin_"], + "vfs": ["vfs_", "VFS"], + "esp32_sdk": ["esp32_", "esp32c", "esp32s"], + "usb": ["usb_", "USB", "cdc_", "CDC"], + "i2c_driver": ["i2c_", "I2C"], + "i2s_driver": ["i2s_", "I2S"], + "spi_driver": ["spi_", "SPI"], + "adc_driver": ["adc_", "ADC"], + "dac_driver": ["dac_", "DAC"], + "touch_driver": ["touch_", "TOUCH"], + "pwm_driver": ["pwm_", "PWM", "ledc_", "LEDC"], + "rmt_driver": ["rmt_", "RMT"], + "pcnt_driver": ["pcnt_", "PCNT"], + "can_driver": ["can_", "CAN", 
"twai_", "TWAI"], + "sdmmc_driver": ["sdmmc_", "SDMMC", "sdcard", "sd_card"], + "temp_sensor": ["temp_sensor", "tsens_"], + "watchdog": ["wdt_", "WDT", "watchdog"], + "brownout": ["brownout", "bod_"], + "ulp": ["ulp_", "ULP"], + "psram": ["psram", "PSRAM", "spiram", "SPIRAM"], + "efuse": ["efuse", "EFUSE"], + "partition": ["partition", "esp_partition"], + "esp_event": ["esp_event", "event_loop", "event_callback"], + "esp_console": ["esp_console", "console_"], + "chip_specific": ["chip_", "esp_chip"], + "esp_system_utils": ["esp_system", "esp_hw", "esp_clk", "esp_sleep"], + "ipc": ["esp_ipc", "ipc_"], + "wifi_config": [ + "g_cnxMgr", + "gChmCxt", + "g_ic", + "TxRxCxt", + "s_dp", + "s_ni", + "s_reg_dump", + "packet$", + "d_mult_table", + "K", + "fcstab", + ], + "smartconfig": ["sc_ack_send"], + "rc_calibration": ["rc_cal", "rcUpdate"], + "noise_floor": ["noise_check"], + "rf_calibration": [ + "set_rx_sense", + "set_rx_gain_cal", + "set_chan_dig_gain", + "tx_pwctrl_init_cal", + "rfcal_txiq", + "set_tx_gain_table", + "correct_rfpll_offset", + "pll_correct_dcap", + "txiq_cal_init", + "pwdet_sar", + "rx_11b_opt", + ], + "wifi_crypto": [ + "pk_use_ecparams", + "process_segments", + "ccmp_", + "rc4_", + "aria_", + "mgf_mask", + "dh_group", + "ccmp_aad_nonce", + "ccmp_encrypt", + "rc4_skip", + "aria_sb1", + "aria_sb2", + "aria_is1", + "aria_is2", + "aria_sl", + "aria_a", + ], + "radio_control": ["fsm_input", "fsm_sconfreq"], + "pbuf": [ + "pbuf_", + ], + "event_group": ["xEventGroup"], + "ringbuffer": ["xRingbuffer", "prvSend", "prvReceive", "prvCopy"], + "provisioning": ["prov_", "prov_stop_and_notify"], + "scan": ["gScanStruct"], + "port": ["xPort"], + "elf_loader": [ + "elf_add", + "elf_add_note", + "elf_add_segment", + "process_image", + "read_encoded", + "read_encoded_value", + "read_encoded_value_with_base", + "process_image_header", + ], + "socket_api": [ + "sockets", + "netconn_", + "accept_function", + "recv_raw", + "socket_ipv4_multicast", + "socket_ipv6_multicast", + ], + "igmp": ["igmp_", "igmp_send", "igmp_input"], + "icmp6": ["icmp6_"], + "arp": ["arp_table"], + "ampdu": [ + "ampdu_", + "rcAmpdu", + "trc_onAmpduOp", + "rcAmpduLowerRate", + "ampdu_dispatch_upto", + ], + "ieee802_11": ["ieee802_11_", "ieee802_11_parse_elems"], + "rate_control": ["rssi_margin", "rcGetSched", "get_rate_fcc_index"], + "nan": ["nan_dp_", "nan_dp_post_tx", "nan_dp_delete_peer"], + "channel_mgmt": ["chm_init", "chm_set_current_channel"], + "trace": ["trc_init", "trc_onAmpduOp"], + "country_code": ["country_info", "country_info_24ghz"], + "multicore": ["do_multicore_settings"], + "Update_lib": ["Update"], + "stdio": [ + "__sf", + "__sflush_r", + "__srefill_r", + "_impure_data", + "_reclaim_reent", + "_open_r", + ], + "strncpy_ops": ["strncpy"], + "math_internal": ["__mdiff", "__lshift", "__mprec_tens", "quorem"], + "character_class": ["__chclass"], + "camellia": ["camellia_", "camellia_feistel"], + "crypto_tables": ["FSb", "FSb2", "FSb3", "FSb4"], + "event_buffer": ["g_eb_list_desc", "eb_space"], + "base_node": ["base_node_", "base_node_add_handler"], + "file_descriptor": ["s_fd_table"], + "tx_delay": ["tx_delay_cfg"], + "deinit": ["deinit_functions"], + "lcp_echo": ["LcpEchoCheck"], + "raw_api": ["raw_bind", "raw_connect"], + "checksum": ["process_checksum"], + "entry_management": ["add_entry"], + "esp_ota": ["esp_ota", "ota_", "read_otadata"], + "http_server": [ + "httpd_", + "parse_url_char", + "cb_headers_complete", + "delete_entry", + "validate_structure", + "config_save", + "config_new", + "verify_url", 
+ "cb_url", + ], + "misc_system": [ + "alarm_cbs", + "start_up", + "tokens", + "unhex", + "osi_funcs_ro", + "enum_function", + "fragment_and_dispatch", + "alarm_set", + "osi_alarm_new", + "config_set_string", + "config_update_newest_section", + "config_remove_key", + "method_strings", + "interop_match", + "interop_database", + "__state_table", + "__action_table", + "s_stub_table", + "s_context", + "s_mmu_ctx", + "s_get_bus_mask", + "hli_queue_put", + "list_remove", + "list_delete", + "lock_acquire_generic", + "is_vect_desc_usable", + "io_mode_str", + "__c$20233", + "interface", + "read_id_core", + "subscribe_idle", + "unsubscribe_idle", + "s_clkout_handle", + "lock_release_generic", + "config_set_int", + "config_get_int", + "config_get_string", + "config_has_key", + "config_remove_section", + "osi_alarm_init", + "osi_alarm_deinit", + "fixed_queue_enqueue", + "fixed_queue_dequeue", + "fixed_queue_new", + "fixed_pkt_queue_enqueue", + "fixed_pkt_queue_new", + "list_append", + "list_prepend", + "list_insert_after", + "list_contains", + "list_get_node", + "hash_function_blob", + "cb_no_body", + "cb_on_body", + "profile_tab", + "get_arg", + "trim", + "buf$", + "process_appended_hash_and_sig$constprop$0", + "uuidType", + "allocate_svc_db_buf", + "_hostname_is_ours", + "s_hli_handlers", + "tick_cb", + "idle_cb", + "input", + "entry_find", + "section_find", + "find_bucket_entry_", + "config_has_section", + "hli_queue_create", + "hli_queue_get", + "hli_c_handler", + "future_ready", + "future_await", + "future_new", + "pkt_queue_enqueue", + "pkt_queue_dequeue", + "pkt_queue_cleanup", + "pkt_queue_create", + "pkt_queue_destroy", + "fixed_pkt_queue_dequeue", + "osi_alarm_cancel", + "osi_alarm_is_active", + "osi_sem_take", + "osi_event_create", + "osi_event_bind", + "alarm_cb_handler", + "list_foreach", + "list_back", + "list_front", + "list_clear", + "fixed_queue_try_peek_first", + "translate_path", + "get_idx", + "find_key", + "init", + "end", + "start", + "set_read_value", + "copy_address_list", + "copy_and_key", + "sdk_cfg_opts", + "leftshift_onebit", + "config_section_end", + "config_section_begin", + "find_entry_and_check_all_reset", + "image_validate", + "xPendingReadyList", + "vListInitialise", + "lock_init_generic", + "ant_bttx_cfg", + "ant_dft_cfg", + "cs_send_to_ctrl_sock", + "config_llc_util_funcs_reset", + "make_set_adv_report_flow_control", + "make_set_event_mask", + "raw_new", + "raw_remove", + "BTE_InitStack", + "parse_read_local_supported_features_response", + "__math_invalidf", + "tinytens", + "__mprec_tinytens", + "__mprec_bigtens", + "vRingbufferDelete", + "vRingbufferDeleteWithCaps", + "vRingbufferReturnItem", + "vRingbufferReturnItemFromISR", + "get_acl_data_size_ble", + "get_features_ble", + "get_features_classic", + "get_acl_packet_size_ble", + "get_acl_packet_size_classic", + "supports_extended_inquiry_response", + "supports_rssi_with_inquiry_results", + "supports_interlaced_inquiry_scan", + "supports_reading_remote_extended_features", + ], + "bluetooth_ll": [ + "lld_pdu_", + "ld_acl_", + "lld_stop_ind_handler", + "lld_evt_winsize_change", + "config_lld_evt_funcs_reset", + "config_lld_funcs_reset", + "config_llm_funcs_reset", + "llm_set_long_adv_data", + "lld_retry_tx_prog", + "llc_link_sup_to_ind_handler", + "config_llc_funcs_reset", + "lld_evt_rxwin_compute", + "config_btdm_funcs_reset", + "config_ea_funcs_reset", + "llc_defalut_state_tab_reset", + "config_rwip_funcs_reset", + "ke_lmp_rx_flooding_detect", + ], +} + +# Demangled patterns: patterns found in demangled C++ names 
+DEMANGLED_PATTERNS = { + "gpio_driver": ["GPIO"], + "uart_driver": ["UART"], + "network_stack": [ + "lwip", + "tcp", + "udp", + "ip4", + "ip6", + "dhcp", + "dns", + "netif", + "ethernet", + "ppp", + "slip", + ], + "wifi_stack": ["NetworkInterface"], + "nimble_bt": [ + "nimble", + "NimBLE", + "ble_hs", + "ble_gap", + "ble_gatt", + "ble_att", + "ble_l2cap", + "ble_sm", + ], + "crypto": ["mbedtls", "crypto", "sha", "aes", "rsa", "ecc", "tls", "ssl"], + "cpp_stdlib": ["std::", "__gnu_cxx::", "__cxxabiv"], + "static_init": ["__static_initialization"], + "rtti": ["__type_info", "__class_type_info"], + "web_server_lib": ["AsyncWebServer", "AsyncWebHandler", "WebServer"], + "async_tcp": ["AsyncClient", "AsyncServer"], + "mdns_lib": ["mdns"], + "json_lib": [ + "ArduinoJson", + "JsonDocument", + "JsonArray", + "JsonObject", + "deserialize", + "serialize", + ], + "http_lib": ["HTTP", "http_", "Request", "Response", "Uri", "WebSocket"], + "logging": ["log", "Log", "print", "Print", "diag_"], + "authentication": ["checkDigestAuthentication"], + "libgcc": ["libgcc"], + "esp_system": ["esp_", "ESP"], + "arduino": ["arduino"], + "nvs": ["nvs_", "_ZTVN3nvs", "nvs::"], + "filesystem": ["spiffs", "vfs"], + "libc": ["newlib"], +} + +# Patterns for categorizing ESPHome core symbols into subcategories +CORE_SUBCATEGORY_PATTERNS = { + "Component Framework": ["Component"], + "Application Core": ["Application"], + "Scheduler": ["Scheduler"], + "Component Iterator": ["ComponentIterator"], + "Helper Functions": ["Helpers", "helpers"], + "Preferences/Storage": ["Preferences", "ESPPreferences"], + "I/O Utilities": ["HighFrequencyLoopRequester"], + "String Utilities": ["str_"], + "Bit Utilities": ["reverse_bits"], + "Data Conversion": ["convert_"], + "Network Utilities": ["network", "IPAddress"], + "API Protocol": ["api::"], + "WiFi Manager": ["wifi::"], + "MQTT Client": ["mqtt::"], + "Logger": ["logger::"], + "OTA Updates": ["ota::"], + "Web Server": ["web_server::"], + "Time Management": ["time::"], + "Sensor Framework": ["sensor::"], + "Binary Sensor": ["binary_sensor::"], + "Switch Framework": ["switch_::"], + "Light Framework": ["light::"], + "Climate Framework": ["climate::"], + "Cover Framework": ["cover::"], +} diff --git a/esphome/analyze_memory/helpers.py b/esphome/analyze_memory/helpers.py new file mode 100644 index 0000000000..cb503b37c5 --- /dev/null +++ b/esphome/analyze_memory/helpers.py @@ -0,0 +1,121 @@ +"""Helper functions for memory analysis.""" + +from functools import cache +from pathlib import Path + +from .const import SECTION_MAPPING + +# Import namespace constant from parent module +# Note: This would create a circular import if done at module level, +# so we'll define it locally here as well +_NAMESPACE_ESPHOME = "esphome::" + + +# Get the list of actual ESPHome components by scanning the components directory +@cache +def get_esphome_components(): + """Get set of actual ESPHome components from the components directory.""" + # Find the components directory relative to this file + # Go up two levels from analyze_memory/helpers.py to esphome/ + current_dir = Path(__file__).parent.parent + components_dir = current_dir / "components" + + if not components_dir.exists() or not components_dir.is_dir(): + return frozenset() + + return frozenset( + item.name + for item in components_dir.iterdir() + if item.is_dir() + and not item.name.startswith(".") + and not item.name.startswith("__") + ) + + +@cache +def get_component_class_patterns(component_name: str) -> list[str]: + """Generate component class 
name patterns for symbol matching. + + Args: + component_name: The component name (e.g., "ota", "wifi", "api") + + Returns: + List of pattern strings to match against demangled symbols + """ + component_upper = component_name.upper() + component_camel = component_name.replace("_", "").title() + return [ + f"{_NAMESPACE_ESPHOME}{component_upper}Component", # e.g., esphome::OTAComponent + f"{_NAMESPACE_ESPHOME}ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent + f"{_NAMESPACE_ESPHOME}{component_camel}Component", # e.g., esphome::OtaComponent + f"{_NAMESPACE_ESPHOME}ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent + ] + + +def map_section_name(raw_section: str) -> str | None: + """Map raw section name to standard section. + + Args: + raw_section: Raw section name from ELF file (e.g., ".iram0.text", ".rodata.str1.1") + + Returns: + Standard section name (".text", ".rodata", ".data", ".bss") or None + """ + for standard_section, patterns in SECTION_MAPPING.items(): + if any(pattern in raw_section for pattern in patterns): + return standard_section + return None + + +def parse_symbol_line(line: str) -> tuple[str, str, int, str] | None: + """Parse a single symbol line from objdump output. + + Args: + line: Line from objdump -t output + + Returns: + Tuple of (section, name, size, address) or None if not a valid symbol. + Format: address l/g w/d F/O section size name + Example: 40084870 l F .iram0.text 00000000 _xt_user_exc + """ + parts = line.split() + if len(parts) < 5: + return None + + try: + # Validate and extract address + address = parts[0] + int(address, 16) + except ValueError: + return None + + # Look for F (function) or O (object) flag + if "F" not in parts and "O" not in parts: + return None + + # Find section, size, and name + for i, part in enumerate(parts): + if not part.startswith("."): + continue + + section = map_section_name(part) + if not section: + break + + # Need at least size field after section + if i + 1 >= len(parts): + break + + try: + size = int(parts[i + 1], 16) + except ValueError: + break + + # Need symbol name and non-zero size + if i + 2 >= len(parts) or size == 0: + break + + name = " ".join(parts[i + 2 :]) + return (section, name, size, address) + + return None diff --git a/esphome/platformio_api.py b/esphome/platformio_api.py index 9418c1c7d3..d59523a74a 100644 --- a/esphome/platformio_api.py +++ b/esphome/platformio_api.py @@ -374,3 +374,23 @@ class IDEData: return f"{self.cc_path[:-7]}addr2line.exe" return f"{self.cc_path[:-3]}addr2line" + + @property + def objdump_path(self) -> str: + # replace gcc at end with objdump + path = self.cc_path + return ( + f"{path[:-7]}objdump.exe" + if path.endswith(".exe") + else f"{path[:-3]}objdump" + ) + + @property + def readelf_path(self) -> str: + # replace gcc at end with readelf + path = self.cc_path + return ( + f"{path[:-7]}readelf.exe" + if path.endswith(".exe") + else f"{path[:-3]}readelf" + ) diff --git a/script/analyze_component_buses.py b/script/analyze_component_buses.py index d0882e22e9..78f5ca3344 100755 --- a/script/analyze_component_buses.py +++ b/script/analyze_component_buses.py @@ -34,6 +34,8 @@ from typing import Any # Add esphome to path sys.path.insert(0, str(Path(__file__).parent.parent)) +from helpers import BASE_BUS_COMPONENTS + from esphome import yaml_util from esphome.config_helpers import Extend, Remove @@ -67,18 +69,6 @@ NO_BUSES_SIGNATURE = "no_buses" # Isolated components have unique signatures and cannot be merged with others 
ISOLATED_SIGNATURE_PREFIX = "isolated_" -# Base bus components - these ARE the bus implementations and should not -# be flagged as needing migration since they are the platform/base components -BASE_BUS_COMPONENTS = { - "i2c", - "spi", - "uart", - "modbus", - "canbus", - "remote_transmitter", - "remote_receiver", -} - # Components that must be tested in isolation (not grouped or batched with others) # These have known build issues that prevent grouping # NOTE: This should be kept in sync with both test_build_components and split_components_for_ci.py diff --git a/script/ci_helpers.py b/script/ci_helpers.py new file mode 100755 index 0000000000..48b0e4bbfe --- /dev/null +++ b/script/ci_helpers.py @@ -0,0 +1,23 @@ +"""Common helper functions for CI scripts.""" + +from __future__ import annotations + +import os + + +def write_github_output(outputs: dict[str, str | int]) -> None: + """Write multiple outputs to GITHUB_OUTPUT or stdout. + + When running in GitHub Actions, writes to the GITHUB_OUTPUT file. + When running locally, writes to stdout for debugging. + + Args: + outputs: Dictionary of key-value pairs to write + """ + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output: + with open(github_output, "a", encoding="utf-8") as f: + f.writelines(f"{key}={value}\n" for key, value in outputs.items()) + else: + for key, value in outputs.items(): + print(f"{key}={value}") diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py new file mode 100755 index 0000000000..4e3fbb9086 --- /dev/null +++ b/script/ci_memory_impact_comment.py @@ -0,0 +1,570 @@ +#!/usr/bin/env python3 +"""Post or update a PR comment with memory impact analysis results. + +This script creates or updates a GitHub PR comment with memory usage changes. +It uses the GitHub CLI (gh) to manage comments and maintains a single comment +that gets updated on subsequent runs. +""" + +from __future__ import annotations + +import argparse +import json +from pathlib import Path +import subprocess +import sys + +from jinja2 import Environment, FileSystemLoader + +# Add esphome to path for analyze_memory import +sys.path.insert(0, str(Path(__file__).parent.parent)) + +# pylint: disable=wrong-import-position + +# Comment marker to identify our memory impact comments +COMMENT_MARKER = "" + +# Thresholds for emoji significance indicators (percentage) +OVERALL_CHANGE_THRESHOLD = 1.0 # Overall RAM/Flash changes +COMPONENT_CHANGE_THRESHOLD = 3.0 # Component breakdown changes + +# Display limits for tables +MAX_COMPONENT_BREAKDOWN_ROWS = 20 # Maximum components to show in breakdown table +MAX_CHANGED_SYMBOLS_ROWS = 30 # Maximum changed symbols to show +MAX_NEW_SYMBOLS_ROWS = 15 # Maximum new symbols to show +MAX_REMOVED_SYMBOLS_ROWS = 15 # Maximum removed symbols to show + +# Symbol display formatting +SYMBOL_DISPLAY_MAX_LENGTH = 100 # Max length before using
tag +SYMBOL_DISPLAY_TRUNCATE_LENGTH = 97 # Length to truncate in summary + +# Component change noise threshold +COMPONENT_CHANGE_NOISE_THRESHOLD = 2 # Ignore component changes ≤ this many bytes + +# Template directory +TEMPLATE_DIR = Path(__file__).parent / "templates" + + +def load_analysis_json(json_path: str) -> dict | None: + """Load memory analysis results from JSON file. + + Args: + json_path: Path to analysis JSON file + + Returns: + Dictionary with analysis results or None if file doesn't exist/can't be loaded + """ + json_file = Path(json_path) + if not json_file.exists(): + print(f"Analysis JSON not found: {json_path}", file=sys.stderr) + return None + + try: + with open(json_file, encoding="utf-8") as f: + return json.load(f) + except (json.JSONDecodeError, OSError) as e: + print(f"Failed to load analysis JSON: {e}", file=sys.stderr) + return None + + +def format_bytes(bytes_value: int) -> str: + """Format bytes value with comma separators. + + Args: + bytes_value: Number of bytes + + Returns: + Formatted string with comma separators (e.g., "1,234 bytes") + """ + return f"{bytes_value:,} bytes" + + +def format_change(before: int, after: int, threshold: float | None = None) -> str: + """Format memory change with delta and percentage. + + Args: + before: Memory usage before change (in bytes) + after: Memory usage after change (in bytes) + threshold: Optional percentage threshold for "significant" change. + If provided, adds supplemental emoji (🎉/🚨/🔸/✅) to chart icons. + If None, only shows chart icons (📈/📉/➡️). + + Returns: + Formatted string with delta and percentage + """ + delta = after - before + percentage = 0.0 if before == 0 else (delta / before) * 100 + + # Always use chart icons to show direction + if delta > 0: + delta_str = f"+{delta:,} bytes" + trend_icon = "📈" + # Add supplemental emoji based on threshold if provided + if threshold is not None: + significance = "🚨" if abs(percentage) > threshold else "🔸" + emoji = f"{trend_icon} {significance}" + else: + emoji = trend_icon + elif delta < 0: + delta_str = f"{delta:,} bytes" + trend_icon = "📉" + # Add supplemental emoji based on threshold if provided + if threshold is not None: + significance = "🎉" if abs(percentage) > threshold else "✅" + emoji = f"{trend_icon} {significance}" + else: + emoji = trend_icon + else: + delta_str = "+0 bytes" + emoji = "➡️" + + # Format percentage with sign + if percentage > 0: + pct_str = f"+{percentage:.2f}%" + elif percentage < 0: + pct_str = f"{percentage:.2f}%" + else: + pct_str = "0.00%" + + return f"{emoji} {delta_str} ({pct_str})" + + +def prepare_symbol_changes_data( + target_symbols: dict | None, pr_symbols: dict | None +) -> dict | None: + """Prepare symbol changes data for template rendering. 
+ + Args: + target_symbols: Symbol name to size mapping for target branch + pr_symbols: Symbol name to size mapping for PR branch + + Returns: + Dictionary with changed, new, and removed symbols, or None if no changes + """ + if not target_symbols or not pr_symbols: + return None + + # Find all symbols that exist in both branches or only in one + all_symbols = set(target_symbols.keys()) | set(pr_symbols.keys()) + + # Track changes + changed_symbols: list[ + tuple[str, int, int, int] + ] = [] # (symbol, target_size, pr_size, delta) + new_symbols: list[tuple[str, int]] = [] # (symbol, size) + removed_symbols: list[tuple[str, int]] = [] # (symbol, size) + + for symbol in all_symbols: + target_size = target_symbols.get(symbol, 0) + pr_size = pr_symbols.get(symbol, 0) + + if target_size == 0 and pr_size > 0: + # New symbol + new_symbols.append((symbol, pr_size)) + elif target_size > 0 and pr_size == 0: + # Removed symbol + removed_symbols.append((symbol, target_size)) + elif target_size != pr_size: + # Changed symbol + delta = pr_size - target_size + changed_symbols.append((symbol, target_size, pr_size, delta)) + + if not changed_symbols and not new_symbols and not removed_symbols: + return None + + # Sort by size/delta + changed_symbols.sort(key=lambda x: abs(x[3]), reverse=True) + new_symbols.sort(key=lambda x: x[1], reverse=True) + removed_symbols.sort(key=lambda x: x[1], reverse=True) + + return { + "changed_symbols": changed_symbols, + "new_symbols": new_symbols, + "removed_symbols": removed_symbols, + } + + +def prepare_component_breakdown_data( + target_analysis: dict | None, pr_analysis: dict | None +) -> list[tuple[str, int, int, int]] | None: + """Prepare component breakdown data for template rendering. + + Args: + target_analysis: Component memory breakdown for target branch + pr_analysis: Component memory breakdown for PR branch + + Returns: + List of tuples (component, target_flash, pr_flash, delta), or None if no changes + """ + if not target_analysis or not pr_analysis: + return None + + # Combine all components from both analyses + all_components = set(target_analysis.keys()) | set(pr_analysis.keys()) + + # Filter to components that have changed (ignoring noise) + changed_components: list[ + tuple[str, int, int, int] + ] = [] # (comp, target_flash, pr_flash, delta) + for comp in all_components: + target_mem = target_analysis.get(comp, {}) + pr_mem = pr_analysis.get(comp, {}) + + target_flash = target_mem.get("flash_total", 0) + pr_flash = pr_mem.get("flash_total", 0) + + # Only include if component has meaningful change (above noise threshold) + delta = pr_flash - target_flash + if abs(delta) > COMPONENT_CHANGE_NOISE_THRESHOLD: + changed_components.append((comp, target_flash, pr_flash, delta)) + + if not changed_components: + return None + + # Sort by absolute delta (largest changes first) + changed_components.sort(key=lambda x: abs(x[3]), reverse=True) + + return changed_components + + +def create_comment_body( + components: list[str], + platform: str, + target_ram: int, + target_flash: int, + pr_ram: int, + pr_flash: int, + target_analysis: dict | None = None, + pr_analysis: dict | None = None, + target_symbols: dict | None = None, + pr_symbols: dict | None = None, + target_cache_hit: bool = False, +) -> str: + """Create the comment body with memory impact analysis using Jinja2 templates. 
+ + Args: + components: List of component names (merged config) + platform: Platform name + target_ram: RAM usage in target branch + target_flash: Flash usage in target branch + pr_ram: RAM usage in PR branch + pr_flash: Flash usage in PR branch + target_analysis: Optional component breakdown for target branch + pr_analysis: Optional component breakdown for PR branch + target_symbols: Optional symbol map for target branch + pr_symbols: Optional symbol map for PR branch + target_cache_hit: Whether target branch analysis was loaded from cache + + Returns: + Formatted comment body + """ + # Set up Jinja2 environment + env = Environment( + loader=FileSystemLoader(TEMPLATE_DIR), + trim_blocks=True, + lstrip_blocks=True, + ) + + # Register custom filters + env.filters["format_bytes"] = format_bytes + env.filters["format_change"] = format_change + + # Prepare template context + context = { + "comment_marker": COMMENT_MARKER, + "platform": platform, + "target_ram": format_bytes(target_ram), + "pr_ram": format_bytes(pr_ram), + "target_flash": format_bytes(target_flash), + "pr_flash": format_bytes(pr_flash), + "ram_change": format_change( + target_ram, pr_ram, threshold=OVERALL_CHANGE_THRESHOLD + ), + "flash_change": format_change( + target_flash, pr_flash, threshold=OVERALL_CHANGE_THRESHOLD + ), + "target_cache_hit": target_cache_hit, + "component_change_threshold": COMPONENT_CHANGE_THRESHOLD, + } + + # Format components list + if len(components) == 1: + context["components_str"] = f"`{components[0]}`" + context["config_note"] = "a representative test configuration" + else: + context["components_str"] = ", ".join(f"`{c}`" for c in sorted(components)) + context["config_note"] = ( + f"a merged configuration with {len(components)} components" + ) + + # Prepare component breakdown if available + component_breakdown = "" + if target_analysis and pr_analysis: + changed_components = prepare_component_breakdown_data( + target_analysis, pr_analysis + ) + if changed_components: + template = env.get_template("ci_memory_impact_component_breakdown.j2") + component_breakdown = template.render( + changed_components=changed_components, + format_bytes=format_bytes, + format_change=format_change, + component_change_threshold=COMPONENT_CHANGE_THRESHOLD, + max_rows=MAX_COMPONENT_BREAKDOWN_ROWS, + ) + + # Prepare symbol changes if available + symbol_changes = "" + if target_symbols and pr_symbols: + symbol_data = prepare_symbol_changes_data(target_symbols, pr_symbols) + if symbol_data: + template = env.get_template("ci_memory_impact_symbol_changes.j2") + symbol_changes = template.render( + **symbol_data, + format_bytes=format_bytes, + format_change=format_change, + max_changed_rows=MAX_CHANGED_SYMBOLS_ROWS, + max_new_rows=MAX_NEW_SYMBOLS_ROWS, + max_removed_rows=MAX_REMOVED_SYMBOLS_ROWS, + symbol_max_length=SYMBOL_DISPLAY_MAX_LENGTH, + symbol_truncate_length=SYMBOL_DISPLAY_TRUNCATE_LENGTH, + ) + + if not target_analysis or not pr_analysis: + print("No ELF files provided, skipping detailed analysis", file=sys.stderr) + + context["component_breakdown"] = component_breakdown + context["symbol_changes"] = symbol_changes + + # Render main template + template = env.get_template("ci_memory_impact_comment_template.j2") + return template.render(**context) + + +def find_existing_comment(pr_number: str) -> str | None: + """Find existing memory impact comment on the PR. 
+ + Args: + pr_number: PR number + + Returns: + Comment numeric ID if found, None otherwise + + Raises: + subprocess.CalledProcessError: If gh command fails + """ + print(f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr) + + # Use gh api to get comments directly - this returns the numeric id field + result = subprocess.run( + [ + "gh", + "api", + f"/repos/{{owner}}/{{repo}}/issues/{pr_number}/comments", + "--jq", + ".[] | {id, body}", + ], + capture_output=True, + text=True, + check=True, + ) + + print( + f"DEBUG: gh api comments output (first 500 chars):\n{result.stdout[:500]}", + file=sys.stderr, + ) + + # Parse comments and look for our marker + comment_count = 0 + for line in result.stdout.strip().split("\n"): + if not line: + continue + + try: + comment = json.loads(line) + comment_count += 1 + comment_id = comment.get("id") + print( + f"DEBUG: Checking comment {comment_count}: id={comment_id}", + file=sys.stderr, + ) + + body = comment.get("body", "") + if COMMENT_MARKER in body: + print( + f"DEBUG: Found existing comment with id={comment_id}", + file=sys.stderr, + ) + # Return the numeric id + return str(comment_id) + print("DEBUG: Comment does not contain marker", file=sys.stderr) + except json.JSONDecodeError as e: + print(f"DEBUG: JSON decode error: {e}", file=sys.stderr) + continue + + print( + f"DEBUG: No existing comment found (checked {comment_count} comments)", + file=sys.stderr, + ) + return None + + +def update_existing_comment(comment_id: str, comment_body: str) -> None: + """Update an existing comment. + + Args: + comment_id: Comment ID to update + comment_body: New comment body text + + Raises: + subprocess.CalledProcessError: If gh command fails + """ + print(f"DEBUG: Updating existing comment {comment_id}", file=sys.stderr) + result = subprocess.run( + [ + "gh", + "api", + f"/repos/{{owner}}/{{repo}}/issues/comments/{comment_id}", + "-X", + "PATCH", + "-f", + f"body={comment_body}", + ], + check=True, + capture_output=True, + text=True, + ) + print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr) + + +def create_new_comment(pr_number: str, comment_body: str) -> None: + """Create a new PR comment. + + Args: + pr_number: PR number + comment_body: Comment body text + + Raises: + subprocess.CalledProcessError: If gh command fails + """ + print(f"DEBUG: Posting new comment on PR #{pr_number}", file=sys.stderr) + result = subprocess.run( + ["gh", "pr", "comment", pr_number, "--body", comment_body], + check=True, + capture_output=True, + text=True, + ) + print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr) + + +def post_or_update_comment(pr_number: str, comment_body: str) -> None: + """Post a new comment or update existing one. 
+ + Args: + pr_number: PR number + comment_body: Comment body text + + Raises: + subprocess.CalledProcessError: If gh command fails + """ + # Look for existing comment + existing_comment_id = find_existing_comment(pr_number) + + if existing_comment_id and existing_comment_id != "None": + update_existing_comment(existing_comment_id, comment_body) + else: + create_new_comment(pr_number, comment_body) + + print("Comment posted/updated successfully", file=sys.stderr) + + +def main() -> int: + """Main entry point.""" + parser = argparse.ArgumentParser( + description="Post or update PR comment with memory impact analysis" + ) + parser.add_argument("--pr-number", required=True, help="PR number") + parser.add_argument( + "--components", + required=True, + help='JSON array of component names (e.g., \'["api", "wifi"]\')', + ) + parser.add_argument("--platform", required=True, help="Platform name") + parser.add_argument( + "--target-ram", type=int, required=True, help="Target branch RAM usage" + ) + parser.add_argument( + "--target-flash", type=int, required=True, help="Target branch flash usage" + ) + parser.add_argument("--pr-ram", type=int, required=True, help="PR branch RAM usage") + parser.add_argument( + "--pr-flash", type=int, required=True, help="PR branch flash usage" + ) + parser.add_argument( + "--target-json", + help="Optional path to target branch analysis JSON (for detailed analysis)", + ) + parser.add_argument( + "--pr-json", + help="Optional path to PR branch analysis JSON (for detailed analysis)", + ) + parser.add_argument( + "--target-cache-hit", + action="store_true", + help="Indicates that target branch analysis was loaded from cache", + ) + + args = parser.parse_args() + + # Parse components from JSON + try: + components = json.loads(args.components) + if not isinstance(components, list): + print("Error: --components must be a JSON array", file=sys.stderr) + sys.exit(1) + except json.JSONDecodeError as e: + print(f"Error parsing --components JSON: {e}", file=sys.stderr) + sys.exit(1) + + # Load analysis JSON files + target_analysis = None + pr_analysis = None + target_symbols = None + pr_symbols = None + + if args.target_json: + target_data = load_analysis_json(args.target_json) + if target_data and target_data.get("detailed_analysis"): + target_analysis = target_data["detailed_analysis"].get("components") + target_symbols = target_data["detailed_analysis"].get("symbols") + + if args.pr_json: + pr_data = load_analysis_json(args.pr_json) + if pr_data and pr_data.get("detailed_analysis"): + pr_analysis = pr_data["detailed_analysis"].get("components") + pr_symbols = pr_data["detailed_analysis"].get("symbols") + + # Create comment body + # Note: Memory totals (RAM/Flash) are summed across all builds if multiple were run. 
+ comment_body = create_comment_body( + components=components, + platform=args.platform, + target_ram=args.target_ram, + target_flash=args.target_flash, + pr_ram=args.pr_ram, + pr_flash=args.pr_flash, + target_analysis=target_analysis, + pr_analysis=pr_analysis, + target_symbols=target_symbols, + pr_symbols=pr_symbols, + target_cache_hit=args.target_cache_hit, + ) + + # Post or update comment + post_or_update_comment(args.pr_number, comment_body) + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/script/ci_memory_impact_extract.py b/script/ci_memory_impact_extract.py new file mode 100755 index 0000000000..77d59417e3 --- /dev/null +++ b/script/ci_memory_impact_extract.py @@ -0,0 +1,281 @@ +#!/usr/bin/env python3 +"""Extract memory usage statistics from ESPHome build output. + +This script parses the PlatformIO build output to extract RAM and flash +usage statistics for a compiled component. It's used by the CI workflow to +compare memory usage between branches. + +The script reads compile output from stdin and looks for the standard +PlatformIO output format: + RAM: [==== ] 36.1% (used 29548 bytes from 81920 bytes) + Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes) + +Optionally performs detailed memory analysis if a build directory is provided. +""" + +from __future__ import annotations + +import argparse +import json +from pathlib import Path +import re +import sys + +# Add esphome to path +sys.path.insert(0, str(Path(__file__).parent.parent)) + +# pylint: disable=wrong-import-position +from esphome.analyze_memory import MemoryAnalyzer +from esphome.platformio_api import IDEData +from script.ci_helpers import write_github_output + +# Regex patterns for extracting memory usage from PlatformIO output +_RAM_PATTERN = re.compile(r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes") +_FLASH_PATTERN = re.compile(r"Flash:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes") +_BUILD_PATH_PATTERN = re.compile(r"Build path: (.+)") + + +def extract_from_compile_output( + output_text: str, +) -> tuple[int | None, int | None, str | None]: + """Extract memory usage and build directory from PlatformIO compile output. + + Supports multiple builds (for component groups or isolated components). + When test_build_components.py creates multiple builds, this sums the + memory usage across all builds. + + Looks for lines like: + RAM: [==== ] 36.1% (used 29548 bytes from 81920 bytes) + Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes) + + Also extracts build directory from lines like: + INFO Compiling app... Build path: /path/to/build + + Args: + output_text: Compile output text (may contain multiple builds) + + Returns: + Tuple of (total_ram_bytes, total_flash_bytes, build_dir) or (None, None, None) if not found + """ + # Find all RAM and Flash matches (may be multiple builds) + ram_matches = _RAM_PATTERN.findall(output_text) + flash_matches = _FLASH_PATTERN.findall(output_text) + + if not ram_matches or not flash_matches: + return None, None, None + + # Sum all builds (handles multiple component groups) + total_ram = sum(int(match) for match in ram_matches) + total_flash = sum(int(match) for match in flash_matches) + + # Extract build directory from ESPHome's explicit build path output + # Look for: INFO Compiling app... 
Build path: /path/to/build + # Note: Multiple builds reuse the same build path (each overwrites the previous) + build_dir = None + if match := _BUILD_PATH_PATTERN.search(output_text): + build_dir = match.group(1).strip() + + return total_ram, total_flash, build_dir + + +def run_detailed_analysis(build_dir: str) -> dict | None: + """Run detailed memory analysis on build directory. + + Args: + build_dir: Path to ESPHome build directory + + Returns: + Dictionary with analysis results or None if analysis fails + """ + build_path = Path(build_dir) + if not build_path.exists(): + print(f"Build directory not found: {build_dir}", file=sys.stderr) + return None + + # Find firmware.elf + elf_path = None + for elf_candidate in [ + build_path / "firmware.elf", + build_path / ".pioenvs" / build_path.name / "firmware.elf", + ]: + if elf_candidate.exists(): + elf_path = str(elf_candidate) + break + + if not elf_path: + print(f"firmware.elf not found in {build_dir}", file=sys.stderr) + return None + + # Find idedata.json - check multiple locations + device_name = build_path.name + idedata_candidates = [ + # In .pioenvs for test builds + build_path / ".pioenvs" / device_name / "idedata.json", + # In .esphome/idedata for regular builds + Path.home() / ".esphome" / "idedata" / f"{device_name}.json", + # Check parent directories for .esphome/idedata (for test_build_components) + build_path.parent.parent.parent / "idedata" / f"{device_name}.json", + ] + + idedata = None + for idedata_path in idedata_candidates: + if not idedata_path.exists(): + continue + try: + with open(idedata_path, encoding="utf-8") as f: + raw_data = json.load(f) + idedata = IDEData(raw_data) + print(f"Loaded idedata from: {idedata_path}", file=sys.stderr) + break + except (json.JSONDecodeError, OSError) as e: + print( + f"Warning: Failed to load idedata from {idedata_path}: {e}", + file=sys.stderr, + ) + + analyzer = MemoryAnalyzer(elf_path, idedata=idedata) + components = analyzer.analyze() + + # Convert to JSON-serializable format + result = { + "components": { + name: { + "text": mem.text_size, + "rodata": mem.rodata_size, + "data": mem.data_size, + "bss": mem.bss_size, + "flash_total": mem.flash_total, + "ram_total": mem.ram_total, + "symbol_count": mem.symbol_count, + } + for name, mem in components.items() + }, + "symbols": {}, + } + + # Build symbol map + for section in analyzer.sections.values(): + for symbol_name, size, _ in section.symbols: + if size > 0: + demangled = analyzer._demangle_symbol(symbol_name) + result["symbols"][demangled] = size + + return result + + +def main() -> int: + """Main entry point.""" + parser = argparse.ArgumentParser( + description="Extract memory usage from ESPHome build output" + ) + parser.add_argument( + "--output-env", + action="store_true", + help="Output to GITHUB_OUTPUT environment file", + ) + parser.add_argument( + "--build-dir", + help="Optional build directory for detailed memory analysis (overrides auto-detection)", + ) + parser.add_argument( + "--output-json", + help="Optional path to save detailed analysis JSON", + ) + parser.add_argument( + "--output-build-dir", + help="Optional path to write the detected build directory", + ) + + args = parser.parse_args() + + # Read compile output from stdin + compile_output = sys.stdin.read() + + # Extract memory usage and build directory + ram_bytes, flash_bytes, detected_build_dir = extract_from_compile_output( + compile_output + ) + + if ram_bytes is None or flash_bytes is None: + print("Failed to extract memory usage from compile output", 
file=sys.stderr) + print("Expected lines like:", file=sys.stderr) + print( + " RAM: [==== ] 36.1% (used 29548 bytes from 81920 bytes)", + file=sys.stderr, + ) + print( + " Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes)", + file=sys.stderr, + ) + return 1 + + # Count how many builds were found + num_builds = len(_RAM_PATTERN.findall(compile_output)) + + if num_builds > 1: + print( + f"Found {num_builds} builds - summing memory usage across all builds", + file=sys.stderr, + ) + print( + "WARNING: Detailed analysis will only cover the last build", + file=sys.stderr, + ) + + print(f"Total RAM: {ram_bytes} bytes", file=sys.stderr) + print(f"Total Flash: {flash_bytes} bytes", file=sys.stderr) + + # Determine which build directory to use (explicit arg overrides auto-detection) + build_dir = args.build_dir or detected_build_dir + + if detected_build_dir: + print(f"Detected build directory: {detected_build_dir}", file=sys.stderr) + if num_builds > 1: + print( + f" (using last of {num_builds} builds for detailed analysis)", + file=sys.stderr, + ) + + # Write build directory to file if requested + if args.output_build_dir and build_dir: + build_dir_path = Path(args.output_build_dir) + build_dir_path.parent.mkdir(parents=True, exist_ok=True) + build_dir_path.write_text(build_dir) + print(f"Wrote build directory to {args.output_build_dir}", file=sys.stderr) + + # Run detailed analysis if build directory available + detailed_analysis = None + if build_dir: + print(f"Running detailed analysis on {build_dir}", file=sys.stderr) + detailed_analysis = run_detailed_analysis(build_dir) + + # Save JSON output if requested + if args.output_json: + output_data = { + "ram_bytes": ram_bytes, + "flash_bytes": flash_bytes, + "detailed_analysis": detailed_analysis, + } + + output_path = Path(args.output_json) + output_path.parent.mkdir(parents=True, exist_ok=True) + with open(output_path, "w", encoding="utf-8") as f: + json.dump(output_data, f, indent=2) + print(f"Saved analysis to {args.output_json}", file=sys.stderr) + + if args.output_env: + # Output to GitHub Actions + write_github_output( + { + "ram_usage": ram_bytes, + "flash_usage": flash_bytes, + } + ) + else: + print(f"{ram_bytes},{flash_bytes}") + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index b000ecee3b..570b1a762c 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -10,7 +10,13 @@ what files have changed. 
It outputs JSON with the following structure: "clang_format": true/false, "python_linters": true/false, "changed_components": ["component1", "component2", ...], - "component_test_count": 5 + "component_test_count": 5, + "memory_impact": { + "should_run": "true/false", + "components": ["component1", "component2", ...], + "platform": "esp32-idf", + "use_merged_config": "true" + } } The CI workflow uses this information to: @@ -20,6 +26,7 @@ The CI workflow uses this information to: - Skip or run Python linters (ruff, flake8, pylint, pyupgrade) - Determine which components to test individually - Decide how to split component tests (if there are many) +- Run memory impact analysis whenever there are changed components (merged config), and also for core-only changes Usage: python script/determine-jobs.py [-b BRANCH] @@ -31,6 +38,8 @@ Options: from __future__ import annotations import argparse +from collections import Counter +from enum import StrEnum from functools import cache import json import os @@ -40,16 +49,47 @@ import sys from typing import Any from helpers import ( + BASE_BUS_COMPONENTS, CPP_FILE_EXTENSIONS, - ESPHOME_COMPONENTS_PATH, PYTHON_FILE_EXTENSIONS, changed_files, get_all_dependencies, + get_component_from_path, + get_component_test_files, get_components_from_integration_fixtures, + parse_test_filename, root_path, ) +class Platform(StrEnum): + """Platform identifiers for memory impact analysis.""" + + ESP8266_ARD = "esp8266-ard" + ESP32_IDF = "esp32-idf" + ESP32_C3_IDF = "esp32-c3-idf" + ESP32_C6_IDF = "esp32-c6-idf" + ESP32_S2_IDF = "esp32-s2-idf" + ESP32_S3_IDF = "esp32-s3-idf" + + +# Memory impact analysis constants +MEMORY_IMPACT_FALLBACK_COMPONENT = "api" # Representative component for core changes +MEMORY_IMPACT_FALLBACK_PLATFORM = Platform.ESP32_IDF # Most representative platform + +# Platform preference order for memory impact analysis +# Prefer newer platforms first as they represent the future of ESPHome +# ESP8266 is most constrained but many new features don't support it +MEMORY_IMPACT_PLATFORM_PREFERENCE = [ + Platform.ESP32_C6_IDF, # ESP32-C6 IDF (newest, supports Thread/Zigbee) + Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained - best for impact analysis) + Platform.ESP32_IDF, # ESP32 IDF platform (primary ESP32 platform, most representative) + Platform.ESP32_C3_IDF, # ESP32-C3 IDF + Platform.ESP32_S2_IDF, # ESP32-S2 IDF + Platform.ESP32_S3_IDF, # ESP32-S3 IDF +] + + def should_run_integration_tests(branch: str | None = None) -> bool: """Determine if integration tests should run based on changed files. @@ -105,12 +145,9 @@ def should_run_integration_tests(branch: str | None = None) -> bool: # Check if any required components changed for file in files: - if file.startswith(ESPHOME_COMPONENTS_PATH): - parts = file.split("/") - if len(parts) >= 3: - component = parts[2] - if component in all_required_components: - return True + component = get_component_from_path(file) + if component and component in all_required_components: + return True return False @@ -224,10 +261,136 @@ def _component_has_tests(component: str) -> bool: Returns: True if the component has test YAML files """ - tests_dir = Path(root_path) / "tests" / "components" / component - if not tests_dir.exists(): - return False - return any(tests_dir.glob("test.*.yaml")) + return bool(get_component_test_files(component)) + + +def detect_memory_impact_config( + branch: str | None = None, +) -> dict[str, Any]: + """Determine memory impact analysis configuration. 
+ + Always runs memory impact analysis when there are changed components, + building a merged configuration with all changed components (like + test_build_components.py does) to get comprehensive memory analysis. + + Args: + branch: Branch to compare against + + Returns: + Dictionary with memory impact analysis parameters: + - should_run: "true" or "false" + - components: list of component names to analyze + - platform: platform name for the merged build + - use_merged_config: "true" (always use merged config) + """ + + # Get actually changed files (not dependencies) + files = changed_files(branch) + + # Find all changed components (excluding core and base bus components) + changed_component_set: set[str] = set() + has_core_changes = False + + for file in files: + component = get_component_from_path(file) + if component: + # Skip base bus components as they're used across many builds + if component not in BASE_BUS_COMPONENTS: + changed_component_set.add(component) + elif file.startswith("esphome/"): + # Core ESPHome files changed (not component-specific) + has_core_changes = True + + # If no components changed but core changed, test representative component + force_fallback_platform = False + if not changed_component_set and has_core_changes: + print( + f"Memory impact: No components changed, but core files changed. " + f"Testing {MEMORY_IMPACT_FALLBACK_COMPONENT} component on {MEMORY_IMPACT_FALLBACK_PLATFORM}.", + file=sys.stderr, + ) + changed_component_set.add(MEMORY_IMPACT_FALLBACK_COMPONENT) + force_fallback_platform = True # Use fallback platform (most representative) + elif not changed_component_set: + # No components and no core changes + return {"should_run": "false"} + + # Find components that have tests and collect their supported platforms + components_with_tests: list[str] = [] + component_platforms_map: dict[ + str, set[Platform] + ] = {} # Track which platforms each component supports + + for component in sorted(changed_component_set): + # Look for test files on preferred platforms + test_files = get_component_test_files(component) + if not test_files: + continue + + # Check if component has tests for any preferred platform + available_platforms = [ + platform + for test_file in test_files + if (platform := parse_test_filename(test_file)[1]) != "all" + and platform in MEMORY_IMPACT_PLATFORM_PREFERENCE + ] + + if not available_platforms: + continue + + component_platforms_map[component] = set(available_platforms) + components_with_tests.append(component) + + # If no components have tests, don't run memory impact + if not components_with_tests: + return {"should_run": "false"} + + # Find common platforms supported by ALL components + # This ensures we can build all components together in a merged config + common_platforms = set(MEMORY_IMPACT_PLATFORM_PREFERENCE) + for component, platforms in component_platforms_map.items(): + common_platforms &= platforms + + # Select the most preferred platform from the common set + # Exception: for core changes, use fallback platform (most representative of codebase) + if force_fallback_platform: + platform = MEMORY_IMPACT_FALLBACK_PLATFORM + elif common_platforms: + # Pick the most preferred platform that all components support + platform = min(common_platforms, key=MEMORY_IMPACT_PLATFORM_PREFERENCE.index) + else: + # No common platform - pick the most commonly supported platform + # This allows testing components individually even if they can't be merged + # Count how many components support each platform + platform_counts = Counter( + p 
for platforms in component_platforms_map.values() for p in platforms + ) + # Pick the platform supported by most components, preferring earlier in MEMORY_IMPACT_PLATFORM_PREFERENCE + platform = max( + platform_counts.keys(), + key=lambda p: ( + platform_counts[p], + -MEMORY_IMPACT_PLATFORM_PREFERENCE.index(p), + ), + ) + + # Debug output + print("Memory impact analysis:", file=sys.stderr) + print(f" Changed components: {sorted(changed_component_set)}", file=sys.stderr) + print(f" Components with tests: {components_with_tests}", file=sys.stderr) + print( + f" Component platforms: {dict(sorted(component_platforms_map.items()))}", + file=sys.stderr, + ) + print(f" Common platforms: {sorted(common_platforms)}", file=sys.stderr) + print(f" Selected platform: {platform}", file=sys.stderr) + + return { + "should_run": "true", + "components": components_with_tests, + "platform": platform, + "use_merged_config": "true", + } def main() -> None: @@ -279,6 +442,9 @@ def main() -> None: if component not in directly_changed_components ] + # Detect components for memory impact analysis (merged config) + memory_impact = detect_memory_impact_config(args.branch) + # Build output output: dict[str, Any] = { "integration_tests": run_integration, @@ -292,6 +458,7 @@ def main() -> None: "component_test_count": len(changed_components_with_tests), "directly_changed_count": len(directly_changed_with_tests), "dependency_only_count": len(dependency_only_components), + "memory_impact": memory_impact, } # Output as JSON diff --git a/script/helpers.py b/script/helpers.py index 61306b9489..edde3d78af 100644 --- a/script/helpers.py +++ b/script/helpers.py @@ -29,6 +29,18 @@ YAML_FILE_EXTENSIONS = (".yaml", ".yml") # Component path prefix ESPHOME_COMPONENTS_PATH = "esphome/components/" +# Base bus components - these ARE the bus implementations and should not +# be flagged as needing migration since they are the platform/base components +BASE_BUS_COMPONENTS = { + "i2c", + "spi", + "uart", + "modbus", + "canbus", + "remote_transmitter", + "remote_receiver", +} + def parse_list_components_output(output: str) -> list[str]: """Parse the output from list-components.py script. @@ -46,6 +58,65 @@ def parse_list_components_output(output: str) -> list[str]: return [c.strip() for c in output.strip().split("\n") if c.strip()] +def parse_test_filename(test_file: Path) -> tuple[str, str]: + """Parse test filename to extract test name and platform. + + Test files follow the naming pattern: test..yaml or test-..yaml + + Args: + test_file: Path to test file + + Returns: + Tuple of (test_name, platform) + """ + parts = test_file.stem.split(".") + if len(parts) == 2: + return parts[0], parts[1] # test, platform + return parts[0], "all" + + +def get_component_from_path(file_path: str) -> str | None: + """Extract component name from a file path. + + Args: + file_path: Path to a file (e.g., "esphome/components/wifi/wifi.cpp") + + Returns: + Component name if path is in components directory, None otherwise + """ + if not file_path.startswith(ESPHOME_COMPONENTS_PATH): + return None + parts = file_path.split("/") + if len(parts) >= 3: + return parts[2] + return None + + +def get_component_test_files( + component: str, *, all_variants: bool = False +) -> list[Path]: + """Get test files for a component. + + Args: + component: Component name (e.g., "wifi") + all_variants: If True, returns all test files including variants (test-*.yaml). + If False, returns only base test files (test.*.yaml). + Default is False. 
+ + Returns: + List of test file paths for the component, or empty list if none exist + """ + tests_dir = Path(root_path) / "tests" / "components" / component + if not tests_dir.exists(): + return [] + + if all_variants: + # Match both test.*.yaml and test-*.yaml patterns + return list(tests_dir.glob("test[.-]*.yaml")) + # Match only test.*.yaml (base tests) + return list(tests_dir.glob("test.*.yaml")) + + def styled(color: str | tuple[str, ...], msg: str, reset: bool = True) -> str: prefix = "".join(color) if isinstance(color, tuple) else color suffix = colorama.Style.RESET_ALL if reset else "" @@ -314,11 +385,9 @@ def _filter_changed_ci(files: list[str]) -> list[str]: # because changes in one file can affect other files in the same component. filtered_files = [] for f in files: - if f.startswith(ESPHOME_COMPONENTS_PATH): - # Check if file belongs to any of the changed components - parts = f.split("/") - if len(parts) >= 3 and parts[2] in component_set: - filtered_files.append(f) + component = get_component_from_path(f) + if component and component in component_set: + filtered_files.append(f) return filtered_files diff --git a/script/list-components.py b/script/list-components.py index 9abb2bc345..11533ceb30 100755 --- a/script/list-components.py +++ b/script/list-components.py @@ -4,7 +4,7 @@ from collections.abc import Callable from pathlib import Path import sys -from helpers import changed_files, git_ls_files +from helpers import changed_files, get_component_from_path, git_ls_files from esphome.const import ( KEY_CORE, @@ -30,11 +30,9 @@ def get_all_component_files() -> list[str]: def extract_component_names_array_from_files_array(files): components = [] for file in files: - file_parts = file.split("/") - if len(file_parts) >= 4: - component_name = file_parts[2] - if component_name not in components: - components.append(component_name) + component_name = get_component_from_path(file) + if component_name and component_name not in components: + components.append(component_name) return components diff --git a/script/split_components_for_ci.py b/script/split_components_for_ci.py index dff46d3619..6ba2598eda 100755 --- a/script/split_components_for_ci.py +++ b/script/split_components_for_ci.py @@ -28,6 +28,7 @@ from script.analyze_component_buses import ( create_grouping_signature, merge_compatible_bus_groups, ) +from script.helpers import get_component_test_files # Weighting for batch creation # Isolated components can't be grouped/merged, so they count as 10x @@ -45,17 +46,12 @@ def has_test_files(component_name: str, tests_dir: Path) -> bool: Args: component_name: Name of the component - tests_dir: Path to tests/components directory + tests_dir: Path to tests/components directory (unused, kept for compatibility) Returns: True if the component has test.*.yaml files """ - component_dir = tests_dir / component_name - if not component_dir.exists() or not component_dir.is_dir(): - return False - - # Check for test.*.yaml files - return any(component_dir.glob("test.*.yaml")) + return bool(get_component_test_files(component_name)) def create_intelligent_batches( diff --git a/script/templates/ci_memory_impact_comment_template.j2 b/script/templates/ci_memory_impact_comment_template.j2 new file mode 100644 index 0000000000..9fbf78e99f --- /dev/null +++ b/script/templates/ci_memory_impact_comment_template.j2 @@ -0,0 +1,27 @@ +{{ comment_marker }} +## Memory Impact Analysis + +**Components:** {{ components_str }} +**Platform:** `{{ platform }}` + +| Metric | Target Branch | This PR | Change | 
+|--------|--------------|---------|--------| +| **RAM** | {{ target_ram }} | {{ pr_ram }} | {{ ram_change }} | +| **Flash** | {{ target_flash }} | {{ pr_flash }} | {{ flash_change }} | +{% if component_breakdown %} +{{ component_breakdown }} +{% endif %} +{% if symbol_changes %} +{{ symbol_changes }} +{% endif %} +{%- if target_cache_hit %} + +> ⚡ Target branch analysis was loaded from cache (build skipped for faster CI). +{%- endif %} + +--- +> **Note:** This analysis measures **static RAM and Flash usage** only (compile-time allocation). +> **Dynamic memory (heap)** cannot be measured automatically. +> **⚠️ You must test this PR on a real device** to measure free heap and ensure no runtime memory issues. + +*This analysis runs automatically when components change. Memory usage is measured from {{ config_note }}.* diff --git a/script/templates/ci_memory_impact_component_breakdown.j2 b/script/templates/ci_memory_impact_component_breakdown.j2 new file mode 100644 index 0000000000..a781e5c546 --- /dev/null +++ b/script/templates/ci_memory_impact_component_breakdown.j2 @@ -0,0 +1,15 @@ + +
+📊 Component Memory Breakdown + +| Component | Target Flash | PR Flash | Change | +|-----------|--------------|----------|--------| +{% for comp, target_flash, pr_flash, delta in changed_components[:max_rows] -%} +{% set threshold = component_change_threshold if comp.startswith("[esphome]") else none -%} +| `{{ comp }}` | {{ target_flash|format_bytes }} | {{ pr_flash|format_bytes }} | {{ format_change(target_flash, pr_flash, threshold=threshold) }} | +{% endfor -%} +{% if changed_components|length > max_rows -%} +| ... | ... | ... | *({{ changed_components|length - max_rows }} more components not shown)* | +{% endif -%} + +
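For context, the breakdown template above consumes the (component, target_flash, pr_flash, delta) tuples produced by prepare_component_breakdown_data() and relies on the format_bytes filter plus a format_change callable passed into the render context. The following minimal sketch shows that wiring in isolation; the inline template string and the simplified format_bytes/format_change stand-ins are assumptions made for illustration and are not the implementations added by this patch.

from jinja2 import Environment

def format_bytes(num: int) -> str:
    # Simplified stand-in for the real filter.
    return f"{num:,} B"

def format_change(target: int, pr: int) -> str:
    # Simplified stand-in: signed byte delta only.
    return f"{pr - target:+,} B"

env = Environment(trim_blocks=True, lstrip_blocks=True)
env.filters["format_bytes"] = format_bytes

table = env.from_string(
    "| Component | Target Flash | PR Flash | Change |\n"
    "|-----------|--------------|----------|--------|\n"
    "{% for comp, target_flash, pr_flash, delta in changed_components[:max_rows] %}"
    "| `{{ comp }}` | {{ target_flash|format_bytes }} | {{ pr_flash|format_bytes }} "
    "| {{ format_change(target_flash, pr_flash) }} |\n"
    "{% endfor %}"
)

changed_components = [("wifi", 120_000, 123_500, 3_500), ("api", 80_000, 79_000, -1_000)]
print(table.render(changed_components=changed_components, format_change=format_change, max_rows=10))

Registering format_bytes as a Jinja2 filter while handing format_change to the template as a plain callable mirrors how the comment script feeds its sub-templates.
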
diff --git a/script/templates/ci_memory_impact_macros.j2 b/script/templates/ci_memory_impact_macros.j2 new file mode 100644 index 0000000000..9fb346a7c5 --- /dev/null +++ b/script/templates/ci_memory_impact_macros.j2 @@ -0,0 +1,8 @@ +{#- Macro for formatting symbol names in tables -#} +{%- macro format_symbol(symbol, max_length, truncate_length) -%} +{%- if symbol|length <= max_length -%} +`{{ symbol }}` +{%- else -%} +
{{ symbol[:truncate_length] }}...{{ symbol }}
+{%- endif -%} +{%- endmacro -%} diff --git a/script/templates/ci_memory_impact_symbol_changes.j2 b/script/templates/ci_memory_impact_symbol_changes.j2 new file mode 100644 index 0000000000..60f2f50e48 --- /dev/null +++ b/script/templates/ci_memory_impact_symbol_changes.j2 @@ -0,0 +1,51 @@ +{%- from 'ci_memory_impact_macros.j2' import format_symbol -%} + +
+🔍 Symbol-Level Changes (click to expand) + +{% if changed_symbols %} + +### Changed Symbols + +| Symbol | Target Size | PR Size | Change | +|--------|-------------|---------|--------| +{% for symbol, target_size, pr_size, delta in changed_symbols[:max_changed_rows] -%} +| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ target_size|format_bytes }} | {{ pr_size|format_bytes }} | {{ format_change(target_size, pr_size) }} | +{% endfor -%} +{% if changed_symbols|length > max_changed_rows -%} +| ... | ... | ... | *({{ changed_symbols|length - max_changed_rows }} more changed symbols not shown)* | +{% endif -%} + +{% endif %} +{% if new_symbols %} + +### New Symbols (top {{ max_new_rows }}) + +| Symbol | Size | +|--------|------| +{% for symbol, size in new_symbols[:max_new_rows] -%} +| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ size|format_bytes }} | +{% endfor -%} +{% if new_symbols|length > max_new_rows -%} +{% set total_new_size = new_symbols|sum(attribute=1) -%} +| *{{ new_symbols|length - max_new_rows }} more new symbols...* | *Total: {{ total_new_size|format_bytes }}* | +{% endif -%} + +{% endif %} +{% if removed_symbols %} + +### Removed Symbols (top {{ max_removed_rows }}) + +| Symbol | Size | +|--------|------| +{% for symbol, size in removed_symbols[:max_removed_rows] -%} +| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ size|format_bytes }} | +{% endfor -%} +{% if removed_symbols|length > max_removed_rows -%} +{% set total_removed_size = removed_symbols|sum(attribute=1) -%} +| *{{ removed_symbols|length - max_removed_rows }} more removed symbols...* | *Total: {{ total_removed_size|format_bytes }}* | +{% endif -%} + +{% endif %} + +
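The symbol tables above lean on the format_symbol macro to keep very long demangled C++ names from breaking the markdown layout. A rough Python equivalent of that truncation rule is sketched below; the default length limits and the parenthesised fallback for the full name are illustrative assumptions (the real limits come from SYMBOL_DISPLAY_MAX_LENGTH and SYMBOL_DISPLAY_TRUNCATE_LENGTH in the comment script, and the macro's expanded form uses markup not reproduced in this sketch).

def format_symbol(symbol: str, max_length: int = 100, truncate_length: int = 80) -> str:
    """Render a (possibly very long) demangled symbol for a markdown table cell."""
    if len(symbol) <= max_length:
        return f"`{symbol}`"
    # Long names are shortened to keep the table readable; the full name is kept
    # alongside, here simply in parentheses as a plain-text approximation.
    return f"`{symbol[:truncate_length]}...` ({symbol})"

print(format_symbol("esphome::light::LightCall::compute_color_mode_()"))
print(format_symbol("x" * 150))
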
diff --git a/script/test_build_components.py b/script/test_build_components.py index df092c091d..77c97a8773 100755 --- a/script/test_build_components.py +++ b/script/test_build_components.py @@ -39,6 +39,7 @@ from script.analyze_component_buses import ( merge_compatible_bus_groups, uses_local_file_references, ) +from script.helpers import get_component_test_files from script.merge_component_configs import merge_component_configs @@ -82,13 +83,14 @@ def show_disk_space_if_ci(esphome_command: str) -> None: def find_component_tests( - components_dir: Path, component_pattern: str = "*" + components_dir: Path, component_pattern: str = "*", base_only: bool = False ) -> dict[str, list[Path]]: """Find all component test files. Args: components_dir: Path to tests/components directory component_pattern: Glob pattern for component names + base_only: If True, only find base test files (test.*.yaml), not variant files (test-*.yaml) Returns: Dictionary mapping component name to list of test files @@ -99,9 +101,10 @@ def find_component_tests( if not comp_dir.is_dir(): continue - # Find test files matching test.*.yaml or test-*.yaml patterns - for test_file in comp_dir.glob("test[.-]*.yaml"): - component_tests[comp_dir.name].append(test_file) + # Get test files using helper function + test_files = get_component_test_files(comp_dir.name, all_variants=not base_only) + if test_files: + component_tests[comp_dir.name] = test_files return dict(component_tests) @@ -931,6 +934,7 @@ def test_components( continue_on_fail: bool, enable_grouping: bool = True, isolated_components: set[str] | None = None, + base_only: bool = False, ) -> int: """Test components with optional intelligent grouping. @@ -944,6 +948,7 @@ def test_components( These are tested WITHOUT --testing-mode to enable full validation (pin conflicts, etc). This is used in CI for directly changed components to catch issues that would be missed with --testing-mode. + base_only: If True, only test base test files (test.*.yaml), not variant files (test-*.yaml) Returns: Exit code (0 for success, 1 for failure) @@ -961,7 +966,7 @@ def test_components( # Find all component tests all_tests = {} for pattern in component_patterns: - all_tests.update(find_component_tests(tests_dir, pattern)) + all_tests.update(find_component_tests(tests_dir, pattern, base_only)) if not all_tests: print(f"No components found matching: {component_patterns}") @@ -1122,6 +1127,11 @@ def main() -> int: "These are tested WITHOUT --testing-mode to enable full validation. 
" "Used in CI for directly changed components to catch pin conflicts and other issues.", ) + parser.add_argument( + "--base-only", + action="store_true", + help="Only test base test files (test.*.yaml), not variant files (test-*.yaml)", + ) args = parser.parse_args() @@ -1140,6 +1150,7 @@ def main() -> int: continue_on_fail=args.continue_on_fail, enable_grouping=not args.no_grouping, isolated_components=isolated_components, + base_only=args.base_only, ) diff --git a/tests/script/test_determine_jobs.py b/tests/script/test_determine_jobs.py index 0559d116be..b479fc03c5 100644 --- a/tests/script/test_determine_jobs.py +++ b/tests/script/test_determine_jobs.py @@ -17,6 +17,9 @@ script_dir = os.path.abspath( ) sys.path.insert(0, script_dir) +# Import helpers module for patching +import helpers # noqa: E402 + spec = importlib.util.spec_from_file_location( "determine_jobs", os.path.join(script_dir, "determine-jobs.py") ) @@ -59,15 +62,29 @@ def mock_subprocess_run() -> Generator[Mock, None, None]: yield mock +@pytest.fixture +def mock_changed_files() -> Generator[Mock, None, None]: + """Mock changed_files for memory impact detection.""" + with patch.object(determine_jobs, "changed_files") as mock: + # Default to empty list + mock.return_value = [] + yield mock + + def test_main_all_tests_should_run( mock_should_run_integration_tests: Mock, mock_should_run_clang_tidy: Mock, mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, mock_subprocess_run: Mock, + mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test when all tests should run.""" + # Ensure we're not in GITHUB_ACTIONS mode for this test + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + mock_should_run_integration_tests.return_value = True mock_should_run_clang_tidy.return_value = True mock_should_run_clang_format.return_value = True @@ -100,6 +117,9 @@ def test_main_all_tests_should_run( assert output["component_test_count"] == len( output["changed_components_with_tests"] ) + # memory_impact should be present + assert "memory_impact" in output + assert output["memory_impact"]["should_run"] == "false" # No files changed def test_main_no_tests_should_run( @@ -108,9 +128,14 @@ def test_main_no_tests_should_run( mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, mock_subprocess_run: Mock, + mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test when no tests should run.""" + # Ensure we're not in GITHUB_ACTIONS mode for this test + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + mock_should_run_integration_tests.return_value = False mock_should_run_clang_tidy.return_value = False mock_should_run_clang_format.return_value = False @@ -136,6 +161,9 @@ def test_main_no_tests_should_run( assert output["changed_components"] == [] assert output["changed_components_with_tests"] == [] assert output["component_test_count"] == 0 + # memory_impact should be present + assert "memory_impact" in output + assert output["memory_impact"]["should_run"] == "false" def test_main_list_components_fails( @@ -169,9 +197,14 @@ def test_main_with_branch_argument( mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, mock_subprocess_run: Mock, + mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test with branch argument.""" + # Ensure we're not in GITHUB_ACTIONS mode for this test + 
monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + mock_should_run_integration_tests.return_value = False mock_should_run_clang_tidy.return_value = True mock_should_run_clang_format.return_value = False @@ -216,6 +249,9 @@ def test_main_with_branch_argument( assert output["component_test_count"] == len( output["changed_components_with_tests"] ) + # memory_impact should be present + assert "memory_impact" in output + assert output["memory_impact"]["should_run"] == "false" def test_should_run_integration_tests( @@ -403,10 +439,15 @@ def test_main_filters_components_without_tests( mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, mock_subprocess_run: Mock, + mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test that components without test files are filtered out.""" + # Ensure we're not in GITHUB_ACTIONS mode for this test + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + mock_should_run_integration_tests.return_value = False mock_should_run_clang_tidy.return_value = False mock_should_run_clang_format.return_value = False @@ -440,9 +481,10 @@ def test_main_filters_components_without_tests( airthings_dir = tests_dir / "airthings_ble" airthings_dir.mkdir(parents=True) - # Mock root_path to use tmp_path + # Mock root_path to use tmp_path (need to patch both determine_jobs and helpers) with ( patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), patch("sys.argv", ["determine-jobs.py"]), ): # Clear the cache since we're mocking root_path @@ -459,3 +501,188 @@ def test_main_filters_components_without_tests( assert set(output["changed_components_with_tests"]) == {"wifi", "sensor"} # component_test_count should be based on components with tests assert output["component_test_count"] == 2 + # memory_impact should be present + assert "memory_impact" in output + assert output["memory_impact"]["should_run"] == "false" + + +# Tests for detect_memory_impact_config function + + +def test_detect_memory_impact_config_with_common_platform(tmp_path: Path) -> None: + """Test memory impact detection when components share a common platform.""" + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # wifi component with esp32-idf test + wifi_dir = tests_dir / "wifi" + wifi_dir.mkdir(parents=True) + (wifi_dir / "test.esp32-idf.yaml").write_text("test: wifi") + + # api component with esp32-idf test + api_dir = tests_dir / "api" + api_dir.mkdir(parents=True) + (api_dir / "test.esp32-idf.yaml").write_text("test: api") + + # Mock changed_files to return wifi and api component changes + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/wifi/wifi.cpp", + "esphome/components/api/api.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + assert result["should_run"] == "true" + assert set(result["components"]) == {"wifi", "api"} + assert result["platform"] == "esp32-idf" # Common platform + assert result["use_merged_config"] == "true" + + +def test_detect_memory_impact_config_core_only_changes(tmp_path: Path) -> None: + """Test memory impact detection with core-only changes (no component changes).""" + # Create test directory structure with fallback component 
+ tests_dir = tmp_path / "tests" / "components" + + # api component (fallback component) with esp32-idf test + api_dir = tests_dir / "api" + api_dir.mkdir(parents=True) + (api_dir / "test.esp32-idf.yaml").write_text("test: api") + + # Mock changed_files to return only core files (no component files) + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/core/application.cpp", + "esphome/core/component.h", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + assert result["should_run"] == "true" + assert result["components"] == ["api"] # Fallback component + assert result["platform"] == "esp32-idf" # Fallback platform + assert result["use_merged_config"] == "true" + + +def test_detect_memory_impact_config_no_common_platform(tmp_path: Path) -> None: + """Test memory impact detection when components have no common platform.""" + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # wifi component only has esp32-idf test + wifi_dir = tests_dir / "wifi" + wifi_dir.mkdir(parents=True) + (wifi_dir / "test.esp32-idf.yaml").write_text("test: wifi") + + # logger component only has esp8266-ard test + logger_dir = tests_dir / "logger" + logger_dir.mkdir(parents=True) + (logger_dir / "test.esp8266-ard.yaml").write_text("test: logger") + + # Mock changed_files to return both components + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/wifi/wifi.cpp", + "esphome/components/logger/logger.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + # Should pick the most frequently supported platform + assert result["should_run"] == "true" + assert set(result["components"]) == {"wifi", "logger"} + # When no common platform, picks most commonly supported + # esp8266-ard is preferred over esp32-idf in the preference list + assert result["platform"] in ["esp32-idf", "esp8266-ard"] + assert result["use_merged_config"] == "true" + + +def test_detect_memory_impact_config_no_changes(tmp_path: Path) -> None: + """Test memory impact detection when no files changed.""" + # Mock changed_files to return empty list + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + assert result["should_run"] == "false" + + +def test_detect_memory_impact_config_no_components_with_tests(tmp_path: Path) -> None: + """Test memory impact detection when changed components have no tests.""" + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # Create component directory but no test files + custom_component_dir = tests_dir / "my_custom_component" + custom_component_dir.mkdir(parents=True) + + # Mock changed_files to return component without tests + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", 
str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/my_custom_component/component.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + assert result["should_run"] == "false" + + +def test_detect_memory_impact_config_skips_base_bus_components(tmp_path: Path) -> None: + """Test that base bus components (i2c, spi, uart) are skipped.""" + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # i2c component (should be skipped as it's a base bus component) + i2c_dir = tests_dir / "i2c" + i2c_dir.mkdir(parents=True) + (i2c_dir / "test.esp32-idf.yaml").write_text("test: i2c") + + # wifi component (should not be skipped) + wifi_dir = tests_dir / "wifi" + wifi_dir.mkdir(parents=True) + (wifi_dir / "test.esp32-idf.yaml").write_text("test: wifi") + + # Mock changed_files to return both i2c and wifi + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/i2c/i2c.cpp", + "esphome/components/wifi/wifi.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + # Should only include wifi, not i2c + assert result["should_run"] == "true" + assert result["components"] == ["wifi"] + assert "i2c" not in result["components"] diff --git a/tests/unit_tests/test_platformio_api.py b/tests/unit_tests/test_platformio_api.py index 07948cc6ad..13ef3516e4 100644 --- a/tests/unit_tests/test_platformio_api.py +++ b/tests/unit_tests/test_platformio_api.py @@ -387,6 +387,42 @@ def test_idedata_addr2line_path_unix(setup_core: Path) -> None: assert result == "/usr/bin/addr2line" +def test_idedata_objdump_path_windows(setup_core: Path) -> None: + """Test IDEData.objdump_path on Windows.""" + raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "C:\\tools\\gcc.exe"} + idedata = platformio_api.IDEData(raw_data) + + result = idedata.objdump_path + assert result == "C:\\tools\\objdump.exe" + + +def test_idedata_objdump_path_unix(setup_core: Path) -> None: + """Test IDEData.objdump_path on Unix.""" + raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "/usr/bin/gcc"} + idedata = platformio_api.IDEData(raw_data) + + result = idedata.objdump_path + assert result == "/usr/bin/objdump" + + +def test_idedata_readelf_path_windows(setup_core: Path) -> None: + """Test IDEData.readelf_path on Windows.""" + raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "C:\\tools\\gcc.exe"} + idedata = platformio_api.IDEData(raw_data) + + result = idedata.readelf_path + assert result == "C:\\tools\\readelf.exe" + + +def test_idedata_readelf_path_unix(setup_core: Path) -> None: + """Test IDEData.readelf_path on Unix.""" + raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "/usr/bin/gcc"} + idedata = platformio_api.IDEData(raw_data) + + result = idedata.readelf_path + assert result == "/usr/bin/readelf" + + def test_patch_structhash(setup_core: Path) -> None: """Test patch_structhash monkey patches platformio functions.""" # Create simple namespace objects to act as modules From fdecda3d65a5474be4d9ccaf0ea3ee56f84548d2 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 19 Oct 2025 08:48:14 -1000 Subject: [PATCH 145/336] [light] Use bitmask instead of std::set for color modes (#11348) --- esphome/components/api/api.proto | 2 +- esphome/components/api/api_connection.cpp | 4 +- esphome/components/api/api_options.proto | 10 + esphome/components/api/api_pb2.h | 2 +- esphome/components/light/color_mode.h | 195 ++++++++++++++++++ esphome/components/light/light_call.cpp | 53 ++--- esphome/components/light/light_call.h | 5 +- .../components/light/light_json_schema.cpp | 1 - esphome/components/light/light_state.cpp | 3 - esphome/components/light/light_traits.h | 30 +-- script/api_protobuf/api_protobuf.py | 18 +- tests/integration/test_light_calls.py | 44 +++- 12 files changed, 308 insertions(+), 59 deletions(-) diff --git a/esphome/components/api/api.proto b/esphome/components/api/api.proto index 753adc3592..d202486cfa 100644 --- a/esphome/components/api/api.proto +++ b/esphome/components/api/api.proto @@ -506,7 +506,7 @@ message ListEntitiesLightResponse { string name = 3; reserved 4; // Deprecated: was string unique_id - repeated ColorMode supported_color_modes = 12 [(container_pointer) = "std::set"]; + repeated ColorMode supported_color_modes = 12 [(container_pointer_no_template) = "light::ColorModeMask"]; // next four supports_* are for legacy clients, newer clients should use color modes // Deprecated in API version 1.6 bool legacy_supports_brightness = 5 [deprecated=true]; diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index 7dfefedd54..6334815678 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -453,7 +453,6 @@ uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection * bool is_single) { auto *light = static_cast(entity); LightStateResponse resp; - auto traits = light->get_traits(); auto values = light->remote_values; auto color_mode = values.get_color_mode(); resp.state = values.is_on(); @@ -477,7 +476,8 @@ uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *c auto *light = static_cast(entity); ListEntitiesLightResponse msg; auto traits = light->get_traits(); - msg.supported_color_modes = &traits.get_supported_color_modes_for_api_(); + // Pass pointer to ColorModeMask so the iterator can encode actual ColorMode enum values + msg.supported_color_modes = &traits.get_supported_color_modes(); if (traits.supports_color_capability(light::ColorCapability::COLOR_TEMPERATURE) || traits.supports_color_capability(light::ColorCapability::COLD_WARM_WHITE)) { msg.min_mireds = traits.get_min_mireds(); diff --git a/esphome/components/api/api_options.proto b/esphome/components/api/api_options.proto index ead8ac0bbc..6b33408e2f 100644 --- a/esphome/components/api/api_options.proto +++ b/esphome/components/api/api_options.proto @@ -70,4 +70,14 @@ extend google.protobuf.FieldOptions { // init(size) before adding elements. This eliminates std::vector template overhead // and is ideal when the exact size is known before populating the array. optional bool fixed_vector = 50013 [default=false]; + + // container_pointer_no_template: Use a non-template container type for repeated fields + // Similar to container_pointer, but for containers that don't take template parameters. + // The container type is used as-is without appending element type. 
+ // The container must have: + // - begin() and end() methods returning iterators + // - empty() method + // Example: [(container_pointer_no_template) = "light::ColorModeMask"] + // generates: const light::ColorModeMask *supported_color_modes{}; + optional string container_pointer_no_template = 50014; } diff --git a/esphome/components/api/api_pb2.h b/esphome/components/api/api_pb2.h index 5603204801..ed49498176 100644 --- a/esphome/components/api/api_pb2.h +++ b/esphome/components/api/api_pb2.h @@ -790,7 +790,7 @@ class ListEntitiesLightResponse final : public InfoResponseProtoMessage { #ifdef HAS_PROTO_MESSAGE_DUMP const char *message_name() const override { return "list_entities_light_response"; } #endif - const std::set *supported_color_modes{}; + const light::ColorModeMask *supported_color_modes{}; float min_mireds{0.0f}; float max_mireds{0.0f}; std::vector effects{}; diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index e524763c9f..a26f917167 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -104,5 +104,200 @@ constexpr ColorModeHelper operator|(ColorModeHelper lhs, ColorMode rhs) { return static_cast(static_cast(lhs) | static_cast(rhs)); } +// Type alias for raw color mode bitmask values +using color_mode_bitmask_t = uint16_t; + +// Constants for ColorMode count and bit range +static constexpr int COLOR_MODE_COUNT = 10; // UNKNOWN through RGB_COLD_WARM_WHITE +static constexpr int MAX_BIT_INDEX = sizeof(color_mode_bitmask_t) * 8; // Number of bits in bitmask type + +// Compile-time array of all ColorMode values in declaration order +// Bit positions (0-9) map directly to enum declaration order +static constexpr ColorMode COLOR_MODES[COLOR_MODE_COUNT] = { + ColorMode::UNKNOWN, // bit 0 + ColorMode::ON_OFF, // bit 1 + ColorMode::BRIGHTNESS, // bit 2 + ColorMode::WHITE, // bit 3 + ColorMode::COLOR_TEMPERATURE, // bit 4 + ColorMode::COLD_WARM_WHITE, // bit 5 + ColorMode::RGB, // bit 6 + ColorMode::RGB_WHITE, // bit 7 + ColorMode::RGB_COLOR_TEMPERATURE, // bit 8 + ColorMode::RGB_COLD_WARM_WHITE, // bit 9 +}; + +/// Map ColorMode enum values to bit positions (0-9) +/// Bit positions follow the enum declaration order +static constexpr int mode_to_bit(ColorMode mode) { + // Linear search through COLOR_MODES array + // Compiler optimizes this to efficient code since array is constexpr + for (int i = 0; i < COLOR_MODE_COUNT; ++i) { + if (COLOR_MODES[i] == mode) + return i; + } + return 0; +} + +/// Map bit positions (0-9) to ColorMode enum values +/// Bit positions follow the enum declaration order +static constexpr ColorMode bit_to_mode(int bit) { + // Direct lookup in COLOR_MODES array + return (bit >= 0 && bit < COLOR_MODE_COUNT) ? 
COLOR_MODES[bit] : ColorMode::UNKNOWN; +} + +/// Helper to compute capability bitmask at compile time +static constexpr color_mode_bitmask_t compute_capability_bitmask(ColorCapability capability) { + color_mode_bitmask_t mask = 0; + uint8_t cap_bit = static_cast(capability); + + // Check each ColorMode to see if it has this capability + for (int bit = 0; bit < COLOR_MODE_COUNT; ++bit) { + uint8_t mode_val = static_cast(bit_to_mode(bit)); + if ((mode_val & cap_bit) != 0) { + mask |= (1 << bit); + } + } + return mask; +} + +// Number of ColorCapability enum values +static constexpr int COLOR_CAPABILITY_COUNT = 6; + +/// Compile-time lookup table mapping ColorCapability to bitmask +/// This array is computed at compile time using constexpr +static constexpr color_mode_bitmask_t CAPABILITY_BITMASKS[] = { + compute_capability_bitmask(ColorCapability::ON_OFF), // 1 << 0 + compute_capability_bitmask(ColorCapability::BRIGHTNESS), // 1 << 1 + compute_capability_bitmask(ColorCapability::WHITE), // 1 << 2 + compute_capability_bitmask(ColorCapability::COLOR_TEMPERATURE), // 1 << 3 + compute_capability_bitmask(ColorCapability::COLD_WARM_WHITE), // 1 << 4 + compute_capability_bitmask(ColorCapability::RGB), // 1 << 5 +}; + +/// Bitmask for storing a set of ColorMode values efficiently. +/// Replaces std::set to eliminate red-black tree overhead (~586 bytes). +class ColorModeMask { + public: + constexpr ColorModeMask() = default; + + /// Support initializer list syntax: {ColorMode::RGB, ColorMode::WHITE} + constexpr ColorModeMask(std::initializer_list modes) { + for (auto mode : modes) { + this->add(mode); + } + } + + constexpr void add(ColorMode mode) { this->mask_ |= (1 << mode_to_bit(mode)); } + + /// Add multiple modes at once using initializer list + constexpr void add(std::initializer_list modes) { + for (auto mode : modes) { + this->add(mode); + } + } + + constexpr bool contains(ColorMode mode) const { return (this->mask_ & (1 << mode_to_bit(mode))) != 0; } + + constexpr size_t size() const { + // Count set bits using Brian Kernighan's algorithm + // More efficient for sparse bitmasks (typical case: 2-4 modes out of 10) + uint16_t n = this->mask_; + size_t count = 0; + while (n) { + n &= n - 1; // Clear the least significant set bit + count++; + } + return count; + } + + constexpr bool empty() const { return this->mask_ == 0; } + + /// Iterator support for API encoding + class Iterator { + public: + using iterator_category = std::forward_iterator_tag; + using value_type = ColorMode; + using difference_type = std::ptrdiff_t; + using pointer = const ColorMode *; + using reference = ColorMode; + + constexpr Iterator(color_mode_bitmask_t mask, int bit) : mask_(mask), bit_(bit) { advance_to_next_set_bit_(); } + + constexpr ColorMode operator*() const { return bit_to_mode(bit_); } + + constexpr Iterator &operator++() { + ++bit_; + advance_to_next_set_bit_(); + return *this; + } + + constexpr bool operator==(const Iterator &other) const { return bit_ == other.bit_; } + + constexpr bool operator!=(const Iterator &other) const { return !(*this == other); } + + private: + constexpr void advance_to_next_set_bit_() { bit_ = ColorModeMask::find_next_set_bit(mask_, bit_); } + + color_mode_bitmask_t mask_; + int bit_; + }; + + constexpr Iterator begin() const { return Iterator(mask_, 0); } + constexpr Iterator end() const { return Iterator(mask_, MAX_BIT_INDEX); } + + /// Get the raw bitmask value for API encoding + constexpr color_mode_bitmask_t get_mask() const { return this->mask_; } + + /// Find the next set 
diff --git a/esphome/components/light/light_call.cpp b/esphome/components/light/light_call.cpp
index 915b8fdf89..af193e1f11 100644
--- a/esphome/components/light/light_call.cpp
+++ b/esphome/components/light/light_call.cpp
@@ -406,7 +406,7 @@ void LightCall::transform_parameters_() {
   }
 }
 ColorMode LightCall::compute_color_mode_() {
-  auto supported_modes = this->parent_->get_traits().get_supported_color_modes();
+  const auto &supported_modes = this->parent_->get_traits().get_supported_color_modes();
   int supported_count = supported_modes.size();
 
   // Some lights don't support any color modes (e.g. monochromatic light), leave it at unknown.
@@ -425,20 +425,19 @@ ColorMode LightCall::compute_color_mode_() {
   // If no color mode is specified, we try to guess the color mode. This is needed for backward compatibility to
   // pre-colormode clients and automations, but also for the MQTT API, where HA doesn't let us know which color mode
   // was used for some reason.
-  std::set<ColorMode> suitable_modes = this->get_suitable_color_modes_();
+  // Compute intersection of suitable and supported modes using bitwise AND
+  color_mode_bitmask_t intersection = this->get_suitable_color_modes_mask_() & supported_modes.get_mask();
 
-  // Don't change if the current mode is suitable.
- if (suitable_modes.count(current_mode) > 0) { + // Don't change if the current mode is in the intersection (suitable AND supported) + if (ColorModeMask::mask_contains(intersection, current_mode)) { ESP_LOGI(TAG, "'%s': color mode not specified; retaining %s", this->parent_->get_name().c_str(), LOG_STR_ARG(color_mode_to_human(current_mode))); return current_mode; } // Use the preferred suitable mode. - for (auto mode : suitable_modes) { - if (supported_modes.count(mode) == 0) - continue; - + if (intersection != 0) { + ColorMode mode = ColorModeMask::first_mode_from_mask(intersection); ESP_LOGI(TAG, "'%s': color mode not specified; using %s", this->parent_->get_name().c_str(), LOG_STR_ARG(color_mode_to_human(mode))); return mode; @@ -451,7 +450,7 @@ ColorMode LightCall::compute_color_mode_() { LOG_STR_ARG(color_mode_to_human(color_mode))); return color_mode; } -std::set LightCall::get_suitable_color_modes_() { +color_mode_bitmask_t LightCall::get_suitable_color_modes_mask_() { bool has_white = this->has_white() && this->white_ > 0.0f; bool has_ct = this->has_color_temperature(); bool has_cwww = @@ -459,36 +458,44 @@ std::set LightCall::get_suitable_color_modes_() { bool has_rgb = (this->has_color_brightness() && this->color_brightness_ > 0.0f) || (this->has_red() || this->has_green() || this->has_blue()); -// Build key from flags: [rgb][cwww][ct][white] + // Build key from flags: [rgb][cwww][ct][white] #define KEY(white, ct, cwww, rgb) ((white) << 0 | (ct) << 1 | (cwww) << 2 | (rgb) << 3) uint8_t key = KEY(has_white, has_ct, has_cwww, has_rgb); switch (key) { case KEY(true, false, false, false): // white only - return {ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, - ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, + ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); case KEY(false, true, false, false): // ct only - return {ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, - ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, + ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); case KEY(true, true, false, false): // white + ct - return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask( + {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); case KEY(false, false, true, false): // cwww only - return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}).get_mask(); case KEY(false, false, false, false): // none - return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE, ColorMode::RGB, - ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE, + ColorMode::RGB, ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE}) + .get_mask(); case KEY(true, false, false, true): // rgb + white - return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}) + 
.get_mask(); case KEY(false, true, false, true): // rgb + ct case KEY(true, true, false, true): // rgb + white + ct - return {ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}).get_mask(); case KEY(false, false, true, true): // rgb + cwww - return {ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB_COLD_WARM_WHITE}).get_mask(); case KEY(false, false, false, true): // rgb only - return {ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, + ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); default: - return {}; // conflicting flags + return 0; // conflicting flags } #undef KEY diff --git a/esphome/components/light/light_call.h b/esphome/components/light/light_call.h index d3a526b136..6931b58b9d 100644 --- a/esphome/components/light/light_call.h +++ b/esphome/components/light/light_call.h @@ -1,7 +1,6 @@ #pragma once #include "light_color_values.h" -#include namespace esphome { @@ -186,8 +185,8 @@ class LightCall { //// Compute the color mode that should be used for this call. ColorMode compute_color_mode_(); - /// Get potential color modes for this light call. - std::set get_suitable_color_modes_(); + /// Get potential color modes bitmask for this light call. + color_mode_bitmask_t get_suitable_color_modes_mask_(); /// Some color modes also can be set using non-native parameters, transform those calls. void transform_parameters_(); diff --git a/esphome/components/light/light_json_schema.cpp b/esphome/components/light/light_json_schema.cpp index 010e130612..e754c453b5 100644 --- a/esphome/components/light/light_json_schema.cpp +++ b/esphome/components/light/light_json_schema.cpp @@ -43,7 +43,6 @@ void LightJSONSchema::dump_json(LightState &state, JsonObject root) { } auto values = state.remote_values; - auto traits = state.get_output()->get_traits(); const auto color_mode = values.get_color_mode(); const char *mode_str = get_color_mode_json_str(color_mode); diff --git a/esphome/components/light/light_state.cpp b/esphome/components/light/light_state.cpp index 1d139e49e7..979dc2f5a1 100644 --- a/esphome/components/light/light_state.cpp +++ b/esphome/components/light/light_state.cpp @@ -191,11 +191,9 @@ void LightState::current_values_as_brightness(float *brightness) { this->current_values.as_brightness(brightness, this->gamma_correct_); } void LightState::current_values_as_rgb(float *red, float *green, float *blue, bool color_interlock) { - auto traits = this->get_traits(); this->current_values.as_rgb(red, green, blue, this->gamma_correct_, false); } void LightState::current_values_as_rgbw(float *red, float *green, float *blue, float *white, bool color_interlock) { - auto traits = this->get_traits(); this->current_values.as_rgbw(red, green, blue, white, this->gamma_correct_, false); } void LightState::current_values_as_rgbww(float *red, float *green, float *blue, float *cold_white, float *warm_white, @@ -209,7 +207,6 @@ void LightState::current_values_as_rgbct(float *red, float *green, float *blue, white_brightness, this->gamma_correct_); } void LightState::current_values_as_cwww(float *cold_white, float *warm_white, bool constant_brightness) { - auto traits = this->get_traits(); this->current_values.as_cwww(cold_white, warm_white, this->gamma_correct_, constant_brightness); } void LightState::current_values_as_ct(float 
*color_temperature, float *white_brightness) { diff --git a/esphome/components/light/light_traits.h b/esphome/components/light/light_traits.h index a45301d148..c83d8ad2a9 100644 --- a/esphome/components/light/light_traits.h +++ b/esphome/components/light/light_traits.h @@ -2,7 +2,6 @@ #include "esphome/core/helpers.h" #include "color_mode.h" -#include namespace esphome { @@ -19,18 +18,17 @@ class LightTraits { public: LightTraits() = default; - const std::set &get_supported_color_modes() const { return this->supported_color_modes_; } - void set_supported_color_modes(std::set supported_color_modes) { - this->supported_color_modes_ = std::move(supported_color_modes); + const ColorModeMask &get_supported_color_modes() const { return this->supported_color_modes_; } + void set_supported_color_modes(ColorModeMask supported_color_modes) { + this->supported_color_modes_ = supported_color_modes; + } + void set_supported_color_modes(std::initializer_list modes) { + this->supported_color_modes_ = ColorModeMask(modes); } - bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.count(color_mode); } + bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.contains(color_mode); } bool supports_color_capability(ColorCapability color_capability) const { - for (auto mode : this->supported_color_modes_) { - if (mode & color_capability) - return true; - } - return false; + return this->supported_color_modes_.has_capability(color_capability); } ESPDEPRECATED("get_supports_brightness() is deprecated, use color modes instead.", "v1.21") @@ -59,19 +57,9 @@ class LightTraits { void set_max_mireds(float max_mireds) { this->max_mireds_ = max_mireds; } protected: -#ifdef USE_API - // The API connection is a friend class to access internal methods - friend class api::APIConnection; - // This method returns a reference to the internal color modes set. - // It is used by the API to avoid copying data when encoding messages. - // Warning: Do not use this method outside of the API connection code. - // It returns a reference to internal data that can be invalidated. 
- const std::set &get_supported_color_modes_for_api_() const { return this->supported_color_modes_; } -#endif - - std::set supported_color_modes_{}; float min_mireds_{0}; float max_mireds_{0}; + ColorModeMask supported_color_modes_{}; }; } // namespace light diff --git a/script/api_protobuf/api_protobuf.py b/script/api_protobuf/api_protobuf.py index 4936434fc2..2f83b0bd79 100755 --- a/script/api_protobuf/api_protobuf.py +++ b/script/api_protobuf/api_protobuf.py @@ -1415,7 +1415,13 @@ class RepeatedTypeInfo(TypeInfo): super().__init__(field) # Check if this is a pointer field by looking for container_pointer option self._container_type = get_field_opt(field, pb.container_pointer, "") - self._use_pointer = bool(self._container_type) + # Check for non-template container pointer + self._container_no_template = get_field_opt( + field, pb.container_pointer_no_template, "" + ) + self._use_pointer = bool(self._container_type) or bool( + self._container_no_template + ) # Check if this should use FixedVector instead of std::vector self._use_fixed_vector = get_field_opt(field, pb.fixed_vector, False) @@ -1434,12 +1440,18 @@ class RepeatedTypeInfo(TypeInfo): @property def cpp_type(self) -> str: + if self._container_no_template: + # Non-template container: use type as-is without appending template parameters + return f"const {self._container_no_template}*" if self._use_pointer and self._container_type: # For pointer fields, use the specified container type - # If the container type already includes the element type (e.g., std::set) - # use it as-is, otherwise append the element type + # Two cases: + # 1. "std::set" - Full type with template params, use as-is + # 2. "std::set" - No <>, append the element type if "<" in self._container_type and ">" in self._container_type: + # Has template parameters specified, use as-is return f"const {self._container_type}*" + # No <> at all, append element type return f"const {self._container_type}<{self._ti.cpp_type}>*" if self._use_fixed_vector: return f"FixedVector<{self._ti.cpp_type}>" diff --git a/tests/integration/test_light_calls.py b/tests/integration/test_light_calls.py index af90ddbe86..0eaf5af91b 100644 --- a/tests/integration/test_light_calls.py +++ b/tests/integration/test_light_calls.py @@ -8,6 +8,7 @@ import asyncio from typing import Any from aioesphomeapi import LightState +from aioesphomeapi.model import ColorMode import pytest from .types import APIClientConnectedFactory, RunCompiledFunction @@ -35,10 +36,51 @@ async def test_light_calls( # Get the light entities entities = await client.list_entities_services() lights = [e for e in entities[0] if e.object_id.startswith("test_")] - assert len(lights) >= 2 # Should have RGBCW and RGB lights + assert len(lights) >= 3 # Should have RGBCW, RGB, and Binary lights rgbcw_light = next(light for light in lights if "RGBCW" in light.name) rgb_light = next(light for light in lights if "RGB Light" in light.name) + binary_light = next(light for light in lights if "Binary" in light.name) + + # Test color mode encoding: Verify supported_color_modes contains actual ColorMode enum values + # not bit positions. This is critical - the iterator must convert bit positions to actual + # ColorMode enum values for API encoding. + + # RGBCW light (rgbww platform) should support RGB_COLD_WARM_WHITE mode + assert ColorMode.RGB_COLD_WARM_WHITE in rgbcw_light.supported_color_modes, ( + f"RGBCW light missing RGB_COLD_WARM_WHITE mode. 
Got: {rgbcw_light.supported_color_modes}" + ) + # Verify it's the actual enum value, not bit position + assert ColorMode.RGB_COLD_WARM_WHITE.value in [ + mode.value for mode in rgbcw_light.supported_color_modes + ], ( + f"RGBCW light has wrong color mode values. Expected {ColorMode.RGB_COLD_WARM_WHITE.value} " + f"(RGB_COLD_WARM_WHITE), got: {[mode.value for mode in rgbcw_light.supported_color_modes]}" + ) + + # RGB light should support RGB mode + assert ColorMode.RGB in rgb_light.supported_color_modes, ( + f"RGB light missing RGB color mode. Got: {rgb_light.supported_color_modes}" + ) + # Verify it's the actual enum value, not bit position + assert ColorMode.RGB.value in [ + mode.value for mode in rgb_light.supported_color_modes + ], ( + f"RGB light has wrong color mode values. Expected {ColorMode.RGB.value} (RGB), got: " + f"{[mode.value for mode in rgb_light.supported_color_modes]}" + ) + + # Binary light (on/off only) should support ON_OFF mode + assert ColorMode.ON_OFF in binary_light.supported_color_modes, ( + f"Binary light missing ON_OFF color mode. Got: {binary_light.supported_color_modes}" + ) + # Verify it's the actual enum value, not bit position + assert ColorMode.ON_OFF.value in [ + mode.value for mode in binary_light.supported_color_modes + ], ( + f"Binary light has wrong color mode values. Expected {ColorMode.ON_OFF.value} (ON_OFF), got: " + f"{[mode.value for mode in binary_light.supported_color_modes]}" + ) async def wait_for_state_change(key: int, timeout: float = 1.0) -> Any: """Wait for a state change for the given entity key.""" From 09b2ad071bdadad82d20080d2352c577b131eebd Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 19 Oct 2025 08:49:13 -1000 Subject: [PATCH 146/336] [esp32_ble_client] Remove duplicate MAC address extraction in set_address() (#11358) --- esphome/components/esp32_ble_client/ble_client_base.h | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/esphome/components/esp32_ble_client/ble_client_base.h b/esphome/components/esp32_ble_client/ble_client_base.h index f2edd6c2b3..7f0ae3b83e 100644 --- a/esphome/components/esp32_ble_client/ble_client_base.h +++ b/esphome/components/esp32_ble_client/ble_client_base.h @@ -61,12 +61,7 @@ class BLEClientBase : public espbt::ESPBTClient, public Component { this->address_str_ = ""; } else { char buf[18]; - uint8_t mac[6] = { - (uint8_t) ((this->address_ >> 40) & 0xff), (uint8_t) ((this->address_ >> 32) & 0xff), - (uint8_t) ((this->address_ >> 24) & 0xff), (uint8_t) ((this->address_ >> 16) & 0xff), - (uint8_t) ((this->address_ >> 8) & 0xff), (uint8_t) ((this->address_ >> 0) & 0xff), - }; - format_mac_addr_upper(mac, buf); + format_mac_addr_upper(this->remote_bda_, buf); this->address_str_ = buf; } } From 57e98ec3fc95104e37984d1a6f25c10a80298c23 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 19 Oct 2025 08:49:58 -1000 Subject: [PATCH 147/336] [wifi] Replace std::vector with std::unique_ptr for WiFi scan buffer (#11364) --- esphome/components/wifi/wifi_component_esp_idf.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/esphome/components/wifi/wifi_component_esp_idf.cpp b/esphome/components/wifi/wifi_component_esp_idf.cpp index 951f5803a6..ce1cc961d0 100644 --- a/esphome/components/wifi/wifi_component_esp_idf.cpp +++ b/esphome/components/wifi/wifi_component_esp_idf.cpp @@ -776,13 +776,12 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) { } uint16_t number = it.number; - std::vector records(number); - err = esp_wifi_scan_get_ap_records(&number, records.data()); + auto records = std::make_unique(number); + err = esp_wifi_scan_get_ap_records(&number, records.get()); if (err != ESP_OK) { ESP_LOGW(TAG, "esp_wifi_scan_get_ap_records failed: %s", esp_err_to_name(err)); return; } - records.resize(number); scan_result_.init(number); for (int i = 0; i < number; i++) { From bda7676e3a52ef2426177f8bb4422cc47c5d6354 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 19 Oct 2025 08:51:41 -1000 Subject: [PATCH 148/336] [bluetooth_proxy] Merge duplicate loops in get_connection_() (#11359) --- .../components/bluetooth_proxy/bluetooth_proxy.cpp | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/esphome/components/bluetooth_proxy/bluetooth_proxy.cpp b/esphome/components/bluetooth_proxy/bluetooth_proxy.cpp index cd7261d5e5..34e0aa93a3 100644 --- a/esphome/components/bluetooth_proxy/bluetooth_proxy.cpp +++ b/esphome/components/bluetooth_proxy/bluetooth_proxy.cpp @@ -155,16 +155,12 @@ esp32_ble_tracker::AdvertisementParserType BluetoothProxy::get_advertisement_par BluetoothConnection *BluetoothProxy::get_connection_(uint64_t address, bool reserve) { for (uint8_t i = 0; i < this->connection_count_; i++) { auto *connection = this->connections_[i]; - if (connection->get_address() == address) + uint64_t conn_addr = connection->get_address(); + + if (conn_addr == address) return connection; - } - if (!reserve) - return nullptr; - - for (uint8_t i = 0; i < this->connection_count_; i++) { - auto *connection = this->connections_[i]; - if (connection->get_address() == 0) { + if (reserve && conn_addr == 0) { connection->send_service_ = INIT_SENDING_SERVICES; connection->set_address(address); // All connections must start at INIT @@ -175,7 +171,6 @@ BluetoothConnection *BluetoothProxy::get_connection_(uint64_t address, bool rese return connection; } } - return nullptr; } From 0266c897c9d33f62be4888ec5def9b71a96f5bd8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 19 Oct 2025 08:53:00 -1000 Subject: [PATCH 149/336] [mdns] Use std::unique_ptr for TXT records to reduce ESP32 flash usage (#11362) --- esphome/components/mdns/mdns_esp32.cpp | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/esphome/components/mdns/mdns_esp32.cpp b/esphome/components/mdns/mdns_esp32.cpp index f2cb2d3ef5..c02bfcbadb 100644 --- a/esphome/components/mdns/mdns_esp32.cpp +++ b/esphome/components/mdns/mdns_esp32.cpp @@ -31,18 +31,17 @@ void MDNSComponent::setup() { mdns_instance_name_set(this->hostname_.c_str()); for (const auto &service : services) { - std::vector txt_records; - for (const auto &record : service.txt_records) { - mdns_txt_item_t it{}; + auto txt_records = std::make_unique(service.txt_records.size()); + for (size_t i = 0; i < service.txt_records.size(); i++) { + const auto &record = service.txt_records[i]; // key and value are either compile-time string literals in flash or pointers to dynamic_txt_values_ // Both remain valid for the lifetime of this function, and ESP-IDF makes internal copies - it.key = MDNS_STR_ARG(record.key); - it.value = MDNS_STR_ARG(record.value); - txt_records.push_back(it); + txt_records[i].key = MDNS_STR_ARG(record.key); + txt_records[i].value = MDNS_STR_ARG(record.value); } uint16_t port = const_cast &>(service.port).value(); err = mdns_service_add(nullptr, MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto), port, - txt_records.data(), txt_records.size()); + txt_records.get(), service.txt_records.size()); if (err != ESP_OK) { ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err)); From 85babe85e4e97dfe3abd5bb4656ea86102118698 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 19 Oct 2025 08:59:47 -1000 Subject: [PATCH 150/336] [sensor] Optimize sliding window filters to eliminate heap fragmentation (#11282) --- esphome/components/sensor/__init__.py | 60 ++- esphome/components/sensor/filter.cpp | 312 +++++++------- esphome/components/sensor/filter.h | 217 +++++++--- tests/components/sensor/common.yaml | 101 +++++ tests/components/sensor/test.esp8266-ard.yaml | 1 + tests/integration/README.md | 75 ++++ .../fixtures/sensor_filters_batch_window.yaml | 58 +++ .../fixtures/sensor_filters_nan_handling.yaml | 84 ++++ .../fixtures/sensor_filters_ring_buffer.yaml | 115 +++++ ...sensor_filters_ring_buffer_wraparound.yaml | 72 ++++ .../sensor_filters_sliding_window.yaml | 123 ++++++ tests/integration/state_utils.py | 167 ++++++++ .../test_sensor_filters_ring_buffer.py | 151 +++++++ .../test_sensor_filters_sliding_window.py | 395 ++++++++++++++++++ 14 files changed, 1697 insertions(+), 234 deletions(-) create mode 100644 tests/components/sensor/common.yaml create mode 100644 tests/components/sensor/test.esp8266-ard.yaml create mode 100644 tests/integration/fixtures/sensor_filters_batch_window.yaml create mode 100644 tests/integration/fixtures/sensor_filters_nan_handling.yaml create mode 100644 tests/integration/fixtures/sensor_filters_ring_buffer.yaml create mode 100644 tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml create mode 100644 tests/integration/fixtures/sensor_filters_sliding_window.yaml create mode 100644 tests/integration/state_utils.py create mode 100644 tests/integration/test_sensor_filters_ring_buffer.py create mode 100644 tests/integration/test_sensor_filters_sliding_window.py diff --git a/esphome/components/sensor/__init__.py b/esphome/components/sensor/__init__.py index 
bf13217787..d9724a741d 100644 --- a/esphome/components/sensor/__init__.py +++ b/esphome/components/sensor/__init__.py @@ -251,6 +251,9 @@ MaxFilter = sensor_ns.class_("MaxFilter", Filter) SlidingWindowMovingAverageFilter = sensor_ns.class_( "SlidingWindowMovingAverageFilter", Filter ) +StreamingMinFilter = sensor_ns.class_("StreamingMinFilter", Filter) +StreamingMaxFilter = sensor_ns.class_("StreamingMaxFilter", Filter) +StreamingMovingAverageFilter = sensor_ns.class_("StreamingMovingAverageFilter", Filter) ExponentialMovingAverageFilter = sensor_ns.class_( "ExponentialMovingAverageFilter", Filter ) @@ -452,14 +455,21 @@ async def skip_initial_filter_to_code(config, filter_id): return cg.new_Pvariable(filter_id, config) -@FILTER_REGISTRY.register("min", MinFilter, MIN_SCHEMA) +@FILTER_REGISTRY.register("min", Filter, MIN_SCHEMA) async def min_filter_to_code(config, filter_id): - return cg.new_Pvariable( - filter_id, - config[CONF_WINDOW_SIZE], - config[CONF_SEND_EVERY], - config[CONF_SEND_FIRST_AT], - ) + window_size: int = config[CONF_WINDOW_SIZE] + send_every: int = config[CONF_SEND_EVERY] + send_first_at: int = config[CONF_SEND_FIRST_AT] + + # Optimization: Use streaming filter for batch windows (window_size == send_every) + # Saves 99.98% memory for large windows (e.g., 20KB → 4 bytes for window_size=5000) + if window_size == send_every: + # Use streaming filter - O(1) memory instead of O(n) + rhs = StreamingMinFilter.new(window_size, send_first_at) + return cg.Pvariable(filter_id, rhs, StreamingMinFilter) + # Use sliding window filter - maintains ring buffer + rhs = MinFilter.new(window_size, send_every, send_first_at) + return cg.Pvariable(filter_id, rhs, MinFilter) MAX_SCHEMA = cv.All( @@ -474,14 +484,18 @@ MAX_SCHEMA = cv.All( ) -@FILTER_REGISTRY.register("max", MaxFilter, MAX_SCHEMA) +@FILTER_REGISTRY.register("max", Filter, MAX_SCHEMA) async def max_filter_to_code(config, filter_id): - return cg.new_Pvariable( - filter_id, - config[CONF_WINDOW_SIZE], - config[CONF_SEND_EVERY], - config[CONF_SEND_FIRST_AT], - ) + window_size: int = config[CONF_WINDOW_SIZE] + send_every: int = config[CONF_SEND_EVERY] + send_first_at: int = config[CONF_SEND_FIRST_AT] + + # Optimization: Use streaming filter for batch windows (window_size == send_every) + if window_size == send_every: + rhs = StreamingMaxFilter.new(window_size, send_first_at) + return cg.Pvariable(filter_id, rhs, StreamingMaxFilter) + rhs = MaxFilter.new(window_size, send_every, send_first_at) + return cg.Pvariable(filter_id, rhs, MaxFilter) SLIDING_AVERAGE_SCHEMA = cv.All( @@ -498,16 +512,20 @@ SLIDING_AVERAGE_SCHEMA = cv.All( @FILTER_REGISTRY.register( "sliding_window_moving_average", - SlidingWindowMovingAverageFilter, + Filter, SLIDING_AVERAGE_SCHEMA, ) async def sliding_window_moving_average_filter_to_code(config, filter_id): - return cg.new_Pvariable( - filter_id, - config[CONF_WINDOW_SIZE], - config[CONF_SEND_EVERY], - config[CONF_SEND_FIRST_AT], - ) + window_size: int = config[CONF_WINDOW_SIZE] + send_every: int = config[CONF_SEND_EVERY] + send_first_at: int = config[CONF_SEND_FIRST_AT] + + # Optimization: Use streaming filter for batch windows (window_size == send_every) + if window_size == send_every: + rhs = StreamingMovingAverageFilter.new(window_size, send_first_at) + return cg.Pvariable(filter_id, rhs, StreamingMovingAverageFilter) + rhs = SlidingWindowMovingAverageFilter.new(window_size, send_every, send_first_at) + return cg.Pvariable(filter_id, rhs, SlidingWindowMovingAverageFilter) EXPONENTIAL_AVERAGE_SCHEMA = 
cv.All( diff --git a/esphome/components/sensor/filter.cpp b/esphome/components/sensor/filter.cpp index 3241ae28af..1eb0b84964 100644 --- a/esphome/components/sensor/filter.cpp +++ b/esphome/components/sensor/filter.cpp @@ -32,50 +32,76 @@ void Filter::initialize(Sensor *parent, Filter *next) { this->next_ = next; } -// MedianFilter -MedianFilter::MedianFilter(size_t window_size, size_t send_every, size_t send_first_at) - : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size) {} -void MedianFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; } -void MedianFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; } -optional MedianFilter::new_value(float value) { - while (this->queue_.size() >= this->window_size_) { - this->queue_.pop_front(); - } - this->queue_.push_back(value); - ESP_LOGVV(TAG, "MedianFilter(%p)::new_value(%f)", this, value); +// SlidingWindowFilter +SlidingWindowFilter::SlidingWindowFilter(size_t window_size, size_t send_every, size_t send_first_at) + : window_size_(window_size), send_every_(send_every), send_at_(send_every - send_first_at) { + // Allocate ring buffer once at initialization + this->window_.init(window_size); +} +optional SlidingWindowFilter::new_value(float value) { + // Add value to ring buffer + if (this->window_count_ < this->window_size_) { + // Buffer not yet full - just append + this->window_.push_back(value); + this->window_count_++; + } else { + // Buffer full - overwrite oldest value (ring buffer) + this->window_[this->window_head_] = value; + this->window_head_++; + if (this->window_head_ >= this->window_size_) { + this->window_head_ = 0; + } + } + + // Check if we should send a result if (++this->send_at_ >= this->send_every_) { this->send_at_ = 0; - - float median = NAN; - if (!this->queue_.empty()) { - // Copy queue without NaN values - std::vector median_queue; - median_queue.reserve(this->queue_.size()); - for (auto v : this->queue_) { - if (!std::isnan(v)) { - median_queue.push_back(v); - } - } - - sort(median_queue.begin(), median_queue.end()); - - size_t queue_size = median_queue.size(); - if (queue_size) { - if (queue_size % 2) { - median = median_queue[queue_size / 2]; - } else { - median = (median_queue[queue_size / 2] + median_queue[(queue_size / 2) - 1]) / 2.0f; - } - } - } - - ESP_LOGVV(TAG, "MedianFilter(%p)::new_value(%f) SENDING %f", this, value, median); - return median; + float result = this->compute_result(); + ESP_LOGVV(TAG, "SlidingWindowFilter(%p)::new_value(%f) SENDING %f", this, value, result); + return result; } return {}; } +// SortedWindowFilter +FixedVector SortedWindowFilter::get_window_values_() { + // Copy window without NaN values using FixedVector (no heap allocation) + // Returns unsorted values - caller will use std::nth_element for partial sorting as needed + FixedVector values; + values.init(this->window_count_); + for (size_t i = 0; i < this->window_count_; i++) { + float v = this->window_[i]; + if (!std::isnan(v)) { + values.push_back(v); + } + } + return values; +} + +// MedianFilter +float MedianFilter::compute_result() { + FixedVector values = this->get_window_values_(); + if (values.empty()) + return NAN; + + size_t size = values.size(); + size_t mid = size / 2; + + if (size % 2) { + // Odd number of elements - use nth_element to find middle element + std::nth_element(values.begin(), values.begin() + mid, values.end()); + return values[mid]; + } + // Even number of elements - need both middle elements + // Use nth_element 
to find upper middle element + std::nth_element(values.begin(), values.begin() + mid, values.end()); + float upper = values[mid]; + // Find the maximum of the lower half (which is now everything before mid) + float lower = *std::max_element(values.begin(), values.begin() + mid); + return (lower + upper) / 2.0f; +} + // SkipInitialFilter SkipInitialFilter::SkipInitialFilter(size_t num_to_ignore) : num_to_ignore_(num_to_ignore) {} optional SkipInitialFilter::new_value(float value) { @@ -91,136 +117,39 @@ optional SkipInitialFilter::new_value(float value) { // QuantileFilter QuantileFilter::QuantileFilter(size_t window_size, size_t send_every, size_t send_first_at, float quantile) - : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size), quantile_(quantile) {} -void QuantileFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; } -void QuantileFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; } -void QuantileFilter::set_quantile(float quantile) { this->quantile_ = quantile; } -optional QuantileFilter::new_value(float value) { - while (this->queue_.size() >= this->window_size_) { - this->queue_.pop_front(); - } - this->queue_.push_back(value); - ESP_LOGVV(TAG, "QuantileFilter(%p)::new_value(%f), quantile:%f", this, value, this->quantile_); + : SortedWindowFilter(window_size, send_every, send_first_at), quantile_(quantile) {} - if (++this->send_at_ >= this->send_every_) { - this->send_at_ = 0; +float QuantileFilter::compute_result() { + FixedVector values = this->get_window_values_(); + if (values.empty()) + return NAN; - float result = NAN; - if (!this->queue_.empty()) { - // Copy queue without NaN values - std::vector quantile_queue; - for (auto v : this->queue_) { - if (!std::isnan(v)) { - quantile_queue.push_back(v); - } - } + size_t position = ceilf(values.size() * this->quantile_) - 1; + ESP_LOGVV(TAG, "QuantileFilter(%p)::position: %zu/%zu", this, position + 1, values.size()); - sort(quantile_queue.begin(), quantile_queue.end()); - - size_t queue_size = quantile_queue.size(); - if (queue_size) { - size_t position = ceilf(queue_size * this->quantile_) - 1; - ESP_LOGVV(TAG, "QuantileFilter(%p)::position: %zu/%zu", this, position + 1, queue_size); - result = quantile_queue[position]; - } - } - - ESP_LOGVV(TAG, "QuantileFilter(%p)::new_value(%f) SENDING %f", this, value, result); - return result; - } - return {}; + // Use nth_element to find the quantile element (O(n) instead of O(n log n)) + std::nth_element(values.begin(), values.begin() + position, values.end()); + return values[position]; } // MinFilter -MinFilter::MinFilter(size_t window_size, size_t send_every, size_t send_first_at) - : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size) {} -void MinFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; } -void MinFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; } -optional MinFilter::new_value(float value) { - while (this->queue_.size() >= this->window_size_) { - this->queue_.pop_front(); - } - this->queue_.push_back(value); - ESP_LOGVV(TAG, "MinFilter(%p)::new_value(%f)", this, value); - - if (++this->send_at_ >= this->send_every_) { - this->send_at_ = 0; - - float min = NAN; - for (auto v : this->queue_) { - if (!std::isnan(v)) { - min = std::isnan(min) ? 
v : std::min(min, v); - } - } - - ESP_LOGVV(TAG, "MinFilter(%p)::new_value(%f) SENDING %f", this, value, min); - return min; - } - return {}; -} +float MinFilter::compute_result() { return this->find_extremum_>(); } // MaxFilter -MaxFilter::MaxFilter(size_t window_size, size_t send_every, size_t send_first_at) - : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size) {} -void MaxFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; } -void MaxFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; } -optional MaxFilter::new_value(float value) { - while (this->queue_.size() >= this->window_size_) { - this->queue_.pop_front(); - } - this->queue_.push_back(value); - ESP_LOGVV(TAG, "MaxFilter(%p)::new_value(%f)", this, value); - - if (++this->send_at_ >= this->send_every_) { - this->send_at_ = 0; - - float max = NAN; - for (auto v : this->queue_) { - if (!std::isnan(v)) { - max = std::isnan(max) ? v : std::max(max, v); - } - } - - ESP_LOGVV(TAG, "MaxFilter(%p)::new_value(%f) SENDING %f", this, value, max); - return max; - } - return {}; -} +float MaxFilter::compute_result() { return this->find_extremum_>(); } // SlidingWindowMovingAverageFilter -SlidingWindowMovingAverageFilter::SlidingWindowMovingAverageFilter(size_t window_size, size_t send_every, - size_t send_first_at) - : send_every_(send_every), send_at_(send_every - send_first_at), window_size_(window_size) {} -void SlidingWindowMovingAverageFilter::set_send_every(size_t send_every) { this->send_every_ = send_every; } -void SlidingWindowMovingAverageFilter::set_window_size(size_t window_size) { this->window_size_ = window_size; } -optional SlidingWindowMovingAverageFilter::new_value(float value) { - while (this->queue_.size() >= this->window_size_) { - this->queue_.pop_front(); - } - this->queue_.push_back(value); - ESP_LOGVV(TAG, "SlidingWindowMovingAverageFilter(%p)::new_value(%f)", this, value); - - if (++this->send_at_ >= this->send_every_) { - this->send_at_ = 0; - - float sum = 0; - size_t valid_count = 0; - for (auto v : this->queue_) { - if (!std::isnan(v)) { - sum += v; - valid_count++; - } +float SlidingWindowMovingAverageFilter::compute_result() { + float sum = 0; + size_t valid_count = 0; + for (size_t i = 0; i < this->window_count_; i++) { + float v = this->window_[i]; + if (!std::isnan(v)) { + sum += v; + valid_count++; } - - float average = NAN; - if (valid_count) { - average = sum / valid_count; - } - - ESP_LOGVV(TAG, "SlidingWindowMovingAverageFilter(%p)::new_value(%f) SENDING %f", this, value, average); - return average; } - return {}; + return valid_count ? 
sum / valid_count : NAN; } // ExponentialMovingAverageFilter @@ -543,5 +472,78 @@ optional ToNTCTemperatureFilter::new_value(float value) { return temp; } +// StreamingFilter (base class) +StreamingFilter::StreamingFilter(size_t window_size, size_t send_first_at) + : window_size_(window_size), send_first_at_(send_first_at) {} + +optional StreamingFilter::new_value(float value) { + // Process the value (child class tracks min/max/sum/etc) + this->process_value(value); + + this->count_++; + + // Check if we should send (handle send_first_at for first value) + bool should_send = false; + if (this->first_send_ && this->count_ >= this->send_first_at_) { + should_send = true; + this->first_send_ = false; + } else if (!this->first_send_ && this->count_ >= this->window_size_) { + should_send = true; + } + + if (should_send) { + float result = this->compute_batch_result(); + // Reset for next batch + this->count_ = 0; + this->reset_batch(); + ESP_LOGVV(TAG, "StreamingFilter(%p)::new_value(%f) SENDING %f", this, value, result); + return result; + } + + return {}; +} + +// StreamingMinFilter +void StreamingMinFilter::process_value(float value) { + // Update running minimum (ignore NaN values) + if (!std::isnan(value)) { + this->current_min_ = std::isnan(this->current_min_) ? value : std::min(this->current_min_, value); + } +} + +float StreamingMinFilter::compute_batch_result() { return this->current_min_; } + +void StreamingMinFilter::reset_batch() { this->current_min_ = NAN; } + +// StreamingMaxFilter +void StreamingMaxFilter::process_value(float value) { + // Update running maximum (ignore NaN values) + if (!std::isnan(value)) { + this->current_max_ = std::isnan(this->current_max_) ? value : std::max(this->current_max_, value); + } +} + +float StreamingMaxFilter::compute_batch_result() { return this->current_max_; } + +void StreamingMaxFilter::reset_batch() { this->current_max_ = NAN; } + +// StreamingMovingAverageFilter +void StreamingMovingAverageFilter::process_value(float value) { + // Accumulate sum (ignore NaN values) + if (!std::isnan(value)) { + this->sum_ += value; + this->valid_count_++; + } +} + +float StreamingMovingAverageFilter::compute_batch_result() { + return this->valid_count_ > 0 ? this->sum_ / this->valid_count_ : NAN; +} + +void StreamingMovingAverageFilter::reset_batch() { + this->sum_ = 0.0f; + this->valid_count_ = 0; +} + } // namespace sensor } // namespace esphome diff --git a/esphome/components/sensor/filter.h b/esphome/components/sensor/filter.h index 49d83e5b4b..57bb06b517 100644 --- a/esphome/components/sensor/filter.h +++ b/esphome/components/sensor/filter.h @@ -44,11 +44,75 @@ class Filter { Sensor *parent_{nullptr}; }; +/** Base class for filters that use a sliding window of values. + * + * Uses a ring buffer to efficiently maintain a fixed-size sliding window without + * reallocations or pop_front() overhead. Eliminates deque fragmentation issues. 
+ */
+class SlidingWindowFilter : public Filter {
+ public:
+  SlidingWindowFilter(size_t window_size, size_t send_every, size_t send_first_at);
+
+  optional<float> new_value(float value) final;
+
+ protected:
+  /// Called by new_value() to compute the filtered result from the current window
+  virtual float compute_result() = 0;
+
+  /// Access the sliding window values (ring buffer implementation)
+  /// Use: for (size_t i = 0; i < window_count_; i++) { float val = window_[i]; }
+  FixedVector<float> window_;
+  size_t window_head_{0};   ///< Index where next value will be written
+  size_t window_count_{0};  ///< Number of valid values in window (0 to window_size_)
+  size_t window_size_;      ///< Maximum window size
+  size_t send_every_;       ///< Send result every N values
+  size_t send_at_;          ///< Counter for send_every
+};
+
+/** Base class for Min/Max filters.
+ *
+ * Provides a templated helper to find extremum values efficiently.
+ */
+class MinMaxFilter : public SlidingWindowFilter {
+ public:
+  using SlidingWindowFilter::SlidingWindowFilter;
+
+ protected:
+  /// Helper to find min or max value in window, skipping NaN values
+  /// Usage: find_extremum_<std::less<float>>() for min, find_extremum_<std::greater<float>>() for max
+  template<typename Compare> float find_extremum_() {
+    float result = NAN;
+    Compare comp;
+    for (size_t i = 0; i < this->window_count_; i++) {
+      float v = this->window_[i];
+      if (!std::isnan(v)) {
+        result = std::isnan(result) ? v : (comp(v, result) ? v : result);
+      }
+    }
+    return result;
+  }
+};
+
+/** Base class for filters that need a sorted window (Median, Quantile).
+ *
+ * Extends SlidingWindowFilter to provide a helper that filters out NaN values.
+ * Derived classes use std::nth_element for efficient partial sorting.
+ */
+class SortedWindowFilter : public SlidingWindowFilter {
+ public:
+  using SlidingWindowFilter::SlidingWindowFilter;
+
+ protected:
+  /// Helper to get non-NaN values from the window (not sorted - caller will use nth_element)
+  /// Returns empty FixedVector if all values are NaN
+  FixedVector<float> get_window_values_();
+};
+
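(Aside, not part of the patch: a standalone sketch of the ring-buffer write pattern that `SlidingWindowFilter::new_value()` uses, with a plain pre-sized `std::vector` standing in for ESPHome's `FixedVector` and the send_every bookkeeping left out.)

```cpp
#include <cstdio>
#include <vector>

// Fixed-capacity ring buffer: append until full, then overwrite the oldest slot.
// No pop_front(), no reallocation, no per-node heap overhead like std::deque.
struct RingWindow {
  std::vector<float> buf;  // pre-sized once; stands in for FixedVector<float>
  size_t head = 0;         // next slot to overwrite once the buffer is full
  size_t count = 0;        // how many valid samples are stored (<= capacity)

  explicit RingWindow(size_t capacity) : buf(capacity) {}

  void push(float v) {
    if (count < buf.size()) {
      buf[count++] = v;  // still filling up
    } else {
      buf[head] = v;  // full: overwrite oldest
      head = (head + 1) % buf.size();
    }
  }
};

int main() {
  RingWindow w(5);
  for (int i = 1; i <= 7; i++)
    w.push(static_cast<float>(i));
  // After pushing 1..7 into a window of 5, the slots hold {6, 7, 3, 4, 5};
  // iteration order does not matter for min/max/median/average.
  for (size_t i = 0; i < w.count; i++)
    printf("%g ", w.buf[i]);
  printf("\n");
  return 0;
}
```

Once the window is full, each new sample costs one store and one index increment and nothing is ever freed or reallocated, which is the fragmentation win over std::deque that the class comment above describes.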
 /** Simple quantile filter.
  *
- * Takes the quantile of the last values and pushes it out every .
+ * Takes the quantile of the last values and pushes it out every .
  */
-class QuantileFilter : public Filter {
+class QuantileFilter : public SortedWindowFilter {
  public:
   /** Construct a QuantileFilter.
    *
@@ -61,25 +125,18 @@ class QuantileFilter : public Filter {
    */
   explicit QuantileFilter(size_t window_size, size_t send_every, size_t send_first_at, float quantile);
 
-  optional<float> new_value(float value) override;
-
-  void set_send_every(size_t send_every);
-  void set_window_size(size_t window_size);
-  void set_quantile(float quantile);
+  void set_quantile(float quantile) { this->quantile_ = quantile; }
 
  protected:
-  std::deque<float> queue_;
-  size_t send_every_;
-  size_t send_at_;
-  size_t window_size_;
+  float compute_result() override;
   float quantile_;
 };
 
 /** Simple median filter.
  *
- * Takes the median of the last values and pushes it out every .
+ * Takes the median of the last values and pushes it out every .
  */
-class MedianFilter : public Filter {
+class MedianFilter : public SortedWindowFilter {
  public:
   /** Construct a MedianFilter.
    *
@@ -89,18 +146,10 @@ class MedianFilter : public Filter {
    * on startup being published on the first *raw* value, so with no filter applied. Must be less than or equal to
    * send_every.
    */
-  explicit MedianFilter(size_t window_size, size_t send_every, size_t send_first_at);
-
-  optional<float> new_value(float value) override;
-
-  void set_send_every(size_t send_every);
-  void set_window_size(size_t window_size);
+  using SortedWindowFilter::SortedWindowFilter;
 
  protected:
-  std::deque<float> queue_;
-  size_t send_every_;
-  size_t send_at_;
-  size_t window_size_;
+  float compute_result() override;
 };
 
 /** Simple skip filter.
@@ -123,9 +172,9 @@ class SkipInitialFilter : public Filter {
 
 /** Simple min filter.
  *
- * Takes the min of the last values and pushes it out every .
+ * Takes the min of the last values and pushes it out every .
  */
-class MinFilter : public Filter {
+class MinFilter : public MinMaxFilter {
  public:
   /** Construct a MinFilter.
    *
@@ -135,25 +184,17 @@ class MinFilter : public Filter {
    * on startup being published on the first *raw* value, so with no filter applied. Must be less than or equal to
    * send_every.
+ * This base class handles the common batching logic. + */ +class StreamingFilter : public Filter { + public: + StreamingFilter(size_t window_size, size_t send_first_at); + + optional new_value(float value) final; + + protected: + /// Called by new_value() to process each value in the batch + virtual void process_value(float value) = 0; + + /// Called by new_value() to compute the result after collecting window_size values + virtual float compute_batch_result() = 0; + + /// Called by new_value() to reset internal state after sending a result + virtual void reset_batch() = 0; + + size_t window_size_; + size_t count_{0}; + size_t send_first_at_; + bool first_send_{true}; +}; + +/** Streaming min filter for batch windows (window_size == send_every). + * + * Uses O(1) memory instead of O(n) by tracking only the minimum value. + */ +class StreamingMinFilter : public StreamingFilter { + public: + using StreamingFilter::StreamingFilter; + + protected: + void process_value(float value) override; + float compute_batch_result() override; + void reset_batch() override; + + float current_min_{NAN}; +}; + +/** Streaming max filter for batch windows (window_size == send_every). + * + * Uses O(1) memory instead of O(n) by tracking only the maximum value. + */ +class StreamingMaxFilter : public StreamingFilter { + public: + using StreamingFilter::StreamingFilter; + + protected: + void process_value(float value) override; + float compute_batch_result() override; + void reset_batch() override; + + float current_max_{NAN}; +}; + +/** Streaming moving average filter for batch windows (window_size == send_every). + * + * Uses O(1) memory instead of O(n) by tracking only sum and count. + */ +class StreamingMovingAverageFilter : public StreamingFilter { + public: + using StreamingFilter::StreamingFilter; + + protected: + void process_value(float value) override; + float compute_batch_result() override; + void reset_batch() override; + + float sum_{0.0f}; + size_t valid_count_{0}; +}; + } // namespace sensor } // namespace esphome diff --git a/tests/components/sensor/common.yaml b/tests/components/sensor/common.yaml new file mode 100644 index 0000000000..ace7d0a38a --- /dev/null +++ b/tests/components/sensor/common.yaml @@ -0,0 +1,101 @@ +sensor: + # Source sensor for testing filters + - platform: template + name: "Source Sensor" + id: source_sensor + lambda: return 42.0; + update_interval: 1s + + # Streaming filters (window_size == send_every) - uses StreamingFilter base class + - platform: copy + source_id: source_sensor + name: "Streaming Min Filter" + filters: + - min: + window_size: 10 + send_every: 10 # Batch window → StreamingMinFilter + + - platform: copy + source_id: source_sensor + name: "Streaming Max Filter" + filters: + - max: + window_size: 10 + send_every: 10 # Batch window → StreamingMaxFilter + + - platform: copy + source_id: source_sensor + name: "Streaming Moving Average Filter" + filters: + - sliding_window_moving_average: + window_size: 10 + send_every: 10 # Batch window → StreamingMovingAverageFilter + + # Sliding window filters (window_size != send_every) - uses SlidingWindowFilter base class with ring buffer + - platform: copy + source_id: source_sensor + name: "Sliding Min Filter" + filters: + - min: + window_size: 10 + send_every: 5 # Sliding window → MinFilter with ring buffer + + - platform: copy + source_id: source_sensor + name: "Sliding Max Filter" + filters: + - max: + window_size: 10 + send_every: 5 # Sliding window → MaxFilter with ring buffer + + - platform: copy + source_id: 
source_sensor + name: "Sliding Median Filter" + filters: + - median: + window_size: 10 + send_every: 5 # Sliding window → MedianFilter with ring buffer + + - platform: copy + source_id: source_sensor + name: "Sliding Quantile Filter" + filters: + - quantile: + window_size: 10 + send_every: 5 + quantile: 0.9 # Sliding window → QuantileFilter with ring buffer + + - platform: copy + source_id: source_sensor + name: "Sliding Moving Average Filter" + filters: + - sliding_window_moving_average: + window_size: 10 + send_every: 5 # Sliding window → SlidingWindowMovingAverageFilter with ring buffer + + # Edge cases + - platform: copy + source_id: source_sensor + name: "Large Batch Window Min" + filters: + - min: + window_size: 1000 + send_every: 1000 # Large batch → StreamingMinFilter (4 bytes, not 4KB) + + - platform: copy + source_id: source_sensor + name: "Small Sliding Window" + filters: + - median: + window_size: 3 + send_every: 1 # Frequent output → MedianFilter with 3-element ring buffer + + # send_first_at parameter test + - platform: copy + source_id: source_sensor + name: "Early Send Filter" + filters: + - max: + window_size: 10 + send_every: 10 + send_first_at: 1 # Send after first value diff --git a/tests/components/sensor/test.esp8266-ard.yaml b/tests/components/sensor/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/sensor/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml diff --git a/tests/integration/README.md b/tests/integration/README.md index 8fce81bb80..2a6b6fe564 100644 --- a/tests/integration/README.md +++ b/tests/integration/README.md @@ -7,6 +7,7 @@ This directory contains end-to-end integration tests for ESPHome, focusing on te - `conftest.py` - Common fixtures and utilities - `const.py` - Constants used throughout the integration tests - `types.py` - Type definitions for fixtures and functions +- `state_utils.py` - State handling utilities (e.g., `InitialStateHelper`, `build_key_to_entity_mapping`) - `fixtures/` - YAML configuration files for tests - `test_*.py` - Individual test files @@ -26,6 +27,32 @@ The `yaml_config` fixture automatically loads YAML configurations based on the t - `reserved_tcp_port` - Reserves a TCP port by holding the socket open until ESPHome needs it - `unused_tcp_port` - Provides the reserved port number for each test +### Helper Utilities + +#### InitialStateHelper (`state_utils.py`) + +The `InitialStateHelper` class solves a common problem in integration tests: when an API client connects, ESPHome automatically broadcasts the current state of all entities. This can interfere with tests that want to track only new state changes triggered by test actions. 
+ +**What it does:** +- Tracks all entities (except stateless ones like buttons) +- Swallows the first state broadcast for each entity +- Forwards all subsequent state changes to your test callback +- Provides `wait_for_initial_states()` to synchronize before test actions + +**When to use it:** +- Any test that triggers entity state changes and needs to verify them +- Tests that would otherwise see duplicate or unexpected states +- Tests that need clean separation between initial state and test-triggered changes + +**Implementation details:** +- Uses `(device_id, key)` tuples to uniquely identify entities across devices +- Automatically excludes `ButtonInfo` entities (stateless) +- Provides debug logging to track state reception (use `--log-cli-level=DEBUG`) +- Safe for concurrent use with multiple entity types + +**Future work:** +Consider converting existing integration tests to use `InitialStateHelper` for more reliable state tracking and to eliminate race conditions related to initial state broadcasts. + ### Writing Tests The simplest way to write a test is to use the `run_compiled` and `api_client_connected` fixtures: @@ -125,6 +152,54 @@ async def test_my_sensor( ``` ##### State Subscription Pattern + +**Recommended: Using InitialStateHelper** + +When an API client connects, ESPHome automatically sends the current state of all entities. The `InitialStateHelper` (from `state_utils.py`) handles this by swallowing these initial states and only forwarding subsequent state changes to your test callback: + +```python +from .state_utils import InitialStateHelper + +# Track state changes with futures +loop = asyncio.get_running_loop() +states: dict[int, EntityState] = {} +state_future: asyncio.Future[EntityState] = loop.create_future() + +def on_state(state: EntityState) -> None: + """This callback only receives NEW state changes, not initial states.""" + states[state.key] = state + # Check for specific condition using isinstance + if isinstance(state, SensorState) and state.state == expected_value: + if not state_future.done(): + state_future.set_result(state) + +# Get entities and set up state synchronization +entities, services = await client.list_entities_services() +initial_state_helper = InitialStateHelper(entities) + +# Subscribe with the wrapper that filters initial states +client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + +# Wait for all initial states to be broadcast +try: + await initial_state_helper.wait_for_initial_states() +except TimeoutError: + pytest.fail("Timeout waiting for initial states") + +# Now perform your test actions - on_state will only receive new changes +# ... trigger state changes ... + +# Wait for expected state +try: + result = await asyncio.wait_for(state_future, timeout=5.0) +except asyncio.TimeoutError: + pytest.fail(f"Expected state not received. 
Got: {list(states.values())}") +``` + +**Legacy: Manual State Tracking** + +If you need to handle initial states manually (not recommended for new tests): + ```python # Track state changes with futures loop = asyncio.get_running_loop() diff --git a/tests/integration/fixtures/sensor_filters_batch_window.yaml b/tests/integration/fixtures/sensor_filters_batch_window.yaml new file mode 100644 index 0000000000..58a254c215 --- /dev/null +++ b/tests/integration/fixtures/sensor_filters_batch_window.yaml @@ -0,0 +1,58 @@ +esphome: + name: test-batch-window-filters + +host: +api: + batch_delay: 0ms # Disable batching to receive all state updates +logger: + level: DEBUG + +# Template sensor that we'll use to publish values +sensor: + - platform: template + name: "Source Sensor" + id: source_sensor + accuracy_decimals: 2 + + # Batch window filters (window_size == send_every) - use streaming filters + - platform: copy + source_id: source_sensor + name: "Min Sensor" + id: min_sensor + filters: + - min: + window_size: 5 + send_every: 5 + send_first_at: 1 + + - platform: copy + source_id: source_sensor + name: "Max Sensor" + id: max_sensor + filters: + - max: + window_size: 5 + send_every: 5 + send_first_at: 1 + + - platform: copy + source_id: source_sensor + name: "Moving Avg Sensor" + id: moving_avg_sensor + filters: + - sliding_window_moving_average: + window_size: 5 + send_every: 5 + send_first_at: 1 + +# Button to trigger publishing test values +button: + - platform: template + name: "Publish Values Button" + id: publish_button + on_press: + - lambda: |- + // Publish 10 values: 1.0, 2.0, ..., 10.0 + for (int i = 1; i <= 10; i++) { + id(source_sensor).publish_state(float(i)); + } diff --git a/tests/integration/fixtures/sensor_filters_nan_handling.yaml b/tests/integration/fixtures/sensor_filters_nan_handling.yaml new file mode 100644 index 0000000000..fcb12cfde5 --- /dev/null +++ b/tests/integration/fixtures/sensor_filters_nan_handling.yaml @@ -0,0 +1,84 @@ +esphome: + name: test-nan-handling + +host: +api: + batch_delay: 0ms # Disable batching to receive all state updates +logger: + level: DEBUG + +sensor: + - platform: template + name: "Source NaN Sensor" + id: source_nan_sensor + accuracy_decimals: 2 + + - platform: copy + source_id: source_nan_sensor + name: "Min NaN Sensor" + id: min_nan_sensor + filters: + - min: + window_size: 5 + send_every: 5 + send_first_at: 1 + + - platform: copy + source_id: source_nan_sensor + name: "Max NaN Sensor" + id: max_nan_sensor + filters: + - max: + window_size: 5 + send_every: 5 + send_first_at: 1 + +script: + - id: publish_nan_values_script + then: + - sensor.template.publish: + id: source_nan_sensor + state: 10.0 + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: !lambda 'return NAN;' + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: 5.0 + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: !lambda 'return NAN;' + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: 15.0 + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: 8.0 + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: !lambda 'return NAN;' + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: 12.0 + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: 3.0 + - delay: 20ms + - sensor.template.publish: + id: source_nan_sensor + state: !lambda 'return NAN;' + +button: + - platform: template + name: "Publish 
NaN Values Button" + id: publish_nan_button + on_press: + - script.execute: publish_nan_values_script diff --git a/tests/integration/fixtures/sensor_filters_ring_buffer.yaml b/tests/integration/fixtures/sensor_filters_ring_buffer.yaml new file mode 100644 index 0000000000..ea7a326b8d --- /dev/null +++ b/tests/integration/fixtures/sensor_filters_ring_buffer.yaml @@ -0,0 +1,115 @@ +esphome: + name: test-sliding-window-filters + +host: +api: + batch_delay: 0ms # Disable batching to receive all state updates +logger: + level: DEBUG + +# Template sensor that we'll use to publish values +sensor: + - platform: template + name: "Source Sensor" + id: source_sensor + accuracy_decimals: 2 + + # ACTUAL sliding window filters (window_size != send_every) - use ring buffers + # Window of 5, send every 2 values + - platform: copy + source_id: source_sensor + name: "Sliding Min Sensor" + id: sliding_min_sensor + filters: + - min: + window_size: 5 + send_every: 2 + send_first_at: 1 + + - platform: copy + source_id: source_sensor + name: "Sliding Max Sensor" + id: sliding_max_sensor + filters: + - max: + window_size: 5 + send_every: 2 + send_first_at: 1 + + - platform: copy + source_id: source_sensor + name: "Sliding Median Sensor" + id: sliding_median_sensor + filters: + - median: + window_size: 5 + send_every: 2 + send_first_at: 1 + + - platform: copy + source_id: source_sensor + name: "Sliding Moving Avg Sensor" + id: sliding_moving_avg_sensor + filters: + - sliding_window_moving_average: + window_size: 5 + send_every: 2 + send_first_at: 1 + +# Button to trigger publishing test values +script: + - id: publish_values_script + then: + # Publish 10 values: 1.0, 2.0, ..., 10.0 + # With window_size=5, send_every=2, send_first_at=1: + # - Output at position 1: window=[1], min=1, max=1, median=1, avg=1 + # - Output at position 3: window=[1,2,3], min=1, max=3, median=2, avg=2 + # - Output at position 5: window=[1,2,3,4,5], min=1, max=5, median=3, avg=3 + # - Output at position 7: window=[3,4,5,6,7], min=3, max=7, median=5, avg=5 + # - Output at position 9: window=[5,6,7,8,9], min=5, max=9, median=7, avg=7 + - sensor.template.publish: + id: source_sensor + state: 1.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 2.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 3.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 4.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 5.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 6.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 7.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 8.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 9.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 10.0 + +button: + - platform: template + name: "Publish Values Button" + id: publish_button + on_press: + - script.execute: publish_values_script diff --git a/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml b/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml new file mode 100644 index 0000000000..bd5980160b --- /dev/null +++ b/tests/integration/fixtures/sensor_filters_ring_buffer_wraparound.yaml @@ -0,0 +1,72 @@ +esphome: + name: test-ring-buffer-wraparound + +host: +api: + batch_delay: 0ms # Disable batching to receive all state updates +logger: + level: DEBUG + +sensor: + - platform: template + name: "Source Wraparound 
Sensor" + id: source_wraparound + accuracy_decimals: 2 + + - platform: copy + source_id: source_wraparound + name: "Wraparound Min Sensor" + id: wraparound_min_sensor + filters: + - min: + window_size: 3 + send_every: 3 + send_first_at: 1 + +script: + - id: publish_wraparound_script + then: + # Publish 9 values to test ring buffer wraparound + # Values: 10, 20, 30, 5, 25, 15, 40, 35, 20 + - sensor.template.publish: + id: source_wraparound + state: 10.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 20.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 30.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 5.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 25.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 15.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 40.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 35.0 + - delay: 20ms + - sensor.template.publish: + id: source_wraparound + state: 20.0 + +button: + - platform: template + name: "Publish Wraparound Button" + id: publish_wraparound_button + on_press: + - script.execute: publish_wraparound_script diff --git a/tests/integration/fixtures/sensor_filters_sliding_window.yaml b/tests/integration/fixtures/sensor_filters_sliding_window.yaml new file mode 100644 index 0000000000..2055118811 --- /dev/null +++ b/tests/integration/fixtures/sensor_filters_sliding_window.yaml @@ -0,0 +1,123 @@ +esphome: + name: test-sliding-window-filters + +host: +api: + batch_delay: 0ms # Disable batching to receive all state updates +logger: + level: DEBUG + +# Template sensor that we'll use to publish values +sensor: + - platform: template + name: "Source Sensor" + id: source_sensor + accuracy_decimals: 2 + + # Min filter sensor + - platform: copy + source_id: source_sensor + name: "Min Sensor" + id: min_sensor + filters: + - min: + window_size: 5 + send_every: 5 + send_first_at: 1 + + # Max filter sensor + - platform: copy + source_id: source_sensor + name: "Max Sensor" + id: max_sensor + filters: + - max: + window_size: 5 + send_every: 5 + send_first_at: 1 + + # Median filter sensor + - platform: copy + source_id: source_sensor + name: "Median Sensor" + id: median_sensor + filters: + - median: + window_size: 5 + send_every: 5 + send_first_at: 1 + + # Quantile filter sensor (90th percentile) + - platform: copy + source_id: source_sensor + name: "Quantile Sensor" + id: quantile_sensor + filters: + - quantile: + window_size: 5 + send_every: 5 + send_first_at: 1 + quantile: 0.9 + + # Moving average filter sensor + - platform: copy + source_id: source_sensor + name: "Moving Avg Sensor" + id: moving_avg_sensor + filters: + - sliding_window_moving_average: + window_size: 5 + send_every: 5 + send_first_at: 1 + +# Script to publish values with delays +script: + - id: publish_values_script + then: + - sensor.template.publish: + id: source_sensor + state: 1.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 2.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 3.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 4.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 5.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 6.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 7.0 + - delay: 20ms + - sensor.template.publish: 
+ id: source_sensor + state: 8.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 9.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor + state: 10.0 + +# Button to trigger publishing test values +button: + - platform: template + name: "Publish Values Button" + id: publish_button + on_press: + - script.execute: publish_values_script diff --git a/tests/integration/state_utils.py b/tests/integration/state_utils.py new file mode 100644 index 0000000000..58d6d2790f --- /dev/null +++ b/tests/integration/state_utils.py @@ -0,0 +1,167 @@ +"""Shared utilities for ESPHome integration tests - state handling.""" + +from __future__ import annotations + +import asyncio +import logging + +from aioesphomeapi import ButtonInfo, EntityInfo, EntityState + +_LOGGER = logging.getLogger(__name__) + + +def build_key_to_entity_mapping( + entities: list[EntityInfo], entity_names: list[str] +) -> dict[int, str]: + """Build a mapping from entity keys to entity names. + + Args: + entities: List of entity info objects from the API + entity_names: List of entity names to search for in object_ids + + Returns: + Dictionary mapping entity keys to entity names + """ + key_to_entity: dict[int, str] = {} + for entity in entities: + obj_id = entity.object_id.lower() + for entity_name in entity_names: + if entity_name in obj_id: + key_to_entity[entity.key] = entity_name + break + return key_to_entity + + +class InitialStateHelper: + """Helper to wait for initial states before processing test states. + + When an API client connects, ESPHome sends the current state of all entities. + This helper wraps the user's state callback and swallows the first state for + each entity, then forwards all subsequent states to the user callback. + + Usage: + entities, services = await client.list_entities_services() + helper = InitialStateHelper(entities) + client.subscribe_states(helper.on_state_wrapper(user_callback)) + await helper.wait_for_initial_states() + """ + + def __init__(self, entities: list[EntityInfo]) -> None: + """Initialize the helper. 
+ + Args: + entities: All entities from list_entities_services() + """ + # Set of (device_id, key) tuples waiting for initial state + # Buttons are stateless, so exclude them + self._wait_initial_states = { + (entity.device_id, entity.key) + for entity in entities + if not isinstance(entity, ButtonInfo) + } + # Keep entity info for debugging - use (device_id, key) tuple + self._entities_by_id = { + (entity.device_id, entity.key): entity for entity in entities + } + + # Log all entities + _LOGGER.debug( + "InitialStateHelper: Found %d total entities: %s", + len(entities), + [(type(e).__name__, e.object_id) for e in entities], + ) + + # Log which ones we're waiting for + _LOGGER.debug( + "InitialStateHelper: Waiting for %d entities (excluding ButtonInfo): %s", + len(self._wait_initial_states), + [self._entities_by_id[k].object_id for k in self._wait_initial_states], + ) + + # Log which ones we're NOT waiting for + not_waiting = { + (e.device_id, e.key) for e in entities + } - self._wait_initial_states + if not_waiting: + not_waiting_info = [ + f"{type(self._entities_by_id[k]).__name__}:{self._entities_by_id[k].object_id}" + for k in not_waiting + ] + _LOGGER.debug( + "InitialStateHelper: NOT waiting for %d entities: %s", + len(not_waiting), + not_waiting_info, + ) + + # Create future in the running event loop + self._initial_states_received = asyncio.get_running_loop().create_future() + # If no entities to wait for, mark complete immediately + if not self._wait_initial_states: + self._initial_states_received.set_result(True) + + def on_state_wrapper(self, user_callback): + """Wrap a user callback to track initial states. + + Args: + user_callback: The user's state callback function + + Returns: + Wrapped callback that swallows first state per entity, forwards rest + """ + + def wrapper(state: EntityState) -> None: + """Swallow initial state per entity, forward subsequent states.""" + # Create entity identifier tuple + entity_id = (state.device_id, state.key) + + # Log which entity is sending state + if entity_id in self._entities_by_id: + entity = self._entities_by_id[entity_id] + _LOGGER.debug( + "Received state for %s (type: %s, device_id: %s, key: %d)", + entity.object_id, + type(entity).__name__, + state.device_id, + state.key, + ) + + # If this entity is waiting for initial state + if entity_id in self._wait_initial_states: + # Remove from waiting set + self._wait_initial_states.discard(entity_id) + + _LOGGER.debug( + "Swallowed initial state for %s, %d entities remaining", + self._entities_by_id[entity_id].object_id + if entity_id in self._entities_by_id + else entity_id, + len(self._wait_initial_states), + ) + + # Check if we've now seen all entities + if ( + not self._wait_initial_states + and not self._initial_states_received.done() + ): + _LOGGER.debug("All initial states received") + self._initial_states_received.set_result(True) + + # Don't forward initial state to user + return + + # Forward subsequent states to user callback + _LOGGER.debug("Forwarding state to user callback") + user_callback(state) + + return wrapper + + async def wait_for_initial_states(self, timeout: float = 5.0) -> None: + """Wait for all initial states to be received. 
+ + Args: + timeout: Maximum time to wait in seconds + + Raises: + asyncio.TimeoutError: If initial states aren't received within timeout + """ + await asyncio.wait_for(self._initial_states_received, timeout=timeout) diff --git a/tests/integration/test_sensor_filters_ring_buffer.py b/tests/integration/test_sensor_filters_ring_buffer.py new file mode 100644 index 0000000000..c8be8edce0 --- /dev/null +++ b/tests/integration/test_sensor_filters_ring_buffer.py @@ -0,0 +1,151 @@ +"""Test sensor ring buffer filter functionality (window_size != send_every).""" + +from __future__ import annotations + +import asyncio + +from aioesphomeapi import EntityState, SensorState +import pytest + +from .state_utils import InitialStateHelper, build_key_to_entity_mapping +from .types import APIClientConnectedFactory, RunCompiledFunction + + +@pytest.mark.asyncio +async def test_sensor_filters_ring_buffer( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test that ring buffer filters (window_size != send_every) work correctly.""" + loop = asyncio.get_running_loop() + + # Track state changes for each sensor + sensor_states: dict[str, list[float]] = { + "sliding_min": [], + "sliding_max": [], + "sliding_median": [], + "sliding_moving_avg": [], + } + + # Futures to track when we receive expected values + all_updates_received = loop.create_future() + + def on_state(state: EntityState) -> None: + """Track sensor state updates.""" + if not isinstance(state, SensorState): + return + + # Skip NaN values + if state.missing_state: + return + + # Get the sensor name from the key mapping + sensor_name = key_to_sensor.get(state.key) + if not sensor_name or sensor_name not in sensor_states: + return + + sensor_states[sensor_name].append(state.state) + + # Check if we've received enough updates from all sensors + # With send_every=2, send_first_at=1, we expect 5 outputs per sensor + if ( + len(sensor_states["sliding_min"]) >= 5 + and len(sensor_states["sliding_max"]) >= 5 + and len(sensor_states["sliding_median"]) >= 5 + and len(sensor_states["sliding_moving_avg"]) >= 5 + and not all_updates_received.done() + ): + all_updates_received.set_result(True) + + async with ( + run_compiled(yaml_config), + api_client_connected() as client, + ): + # Get entities first to build key mapping + entities, services = await client.list_entities_services() + + # Build key-to-sensor mapping + key_to_sensor = build_key_to_entity_mapping( + entities, + [ + "sliding_min", + "sliding_max", + "sliding_median", + "sliding_moving_avg", + ], + ) + + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial states to be sent before pressing button + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial states") + + # Find the publish button + publish_button = next( + (e for e in entities if "publish_values_button" in e.object_id.lower()), + None, + ) + assert publish_button is not None, "Publish Values Button not found" + + # Press the button to publish test values + client.button_command(publish_button.key) + + # Wait for all sensors to receive their values + try: + await asyncio.wait_for(all_updates_received, timeout=10.0) + except TimeoutError: + # Provide detailed failure info + pytest.fail( + f"Timeout waiting for 
updates. Received states:\n" + f" min: {sensor_states['sliding_min']}\n" + f" max: {sensor_states['sliding_max']}\n" + f" median: {sensor_states['sliding_median']}\n" + f" moving_avg: {sensor_states['sliding_moving_avg']}" + ) + + # Verify we got 5 outputs per sensor (positions 1, 3, 5, 7, 9) + assert len(sensor_states["sliding_min"]) == 5, ( + f"Min sensor should have 5 values, got {len(sensor_states['sliding_min'])}: {sensor_states['sliding_min']}" + ) + assert len(sensor_states["sliding_max"]) == 5 + assert len(sensor_states["sliding_median"]) == 5 + assert len(sensor_states["sliding_moving_avg"]) == 5 + + # Verify the values at each output position + # Position 1: window=[1] + assert sensor_states["sliding_min"][0] == pytest.approx(1.0) + assert sensor_states["sliding_max"][0] == pytest.approx(1.0) + assert sensor_states["sliding_median"][0] == pytest.approx(1.0) + assert sensor_states["sliding_moving_avg"][0] == pytest.approx(1.0) + + # Position 3: window=[1,2,3] + assert sensor_states["sliding_min"][1] == pytest.approx(1.0) + assert sensor_states["sliding_max"][1] == pytest.approx(3.0) + assert sensor_states["sliding_median"][1] == pytest.approx(2.0) + assert sensor_states["sliding_moving_avg"][1] == pytest.approx(2.0) + + # Position 5: window=[1,2,3,4,5] + assert sensor_states["sliding_min"][2] == pytest.approx(1.0) + assert sensor_states["sliding_max"][2] == pytest.approx(5.0) + assert sensor_states["sliding_median"][2] == pytest.approx(3.0) + assert sensor_states["sliding_moving_avg"][2] == pytest.approx(3.0) + + # Position 7: window=[3,4,5,6,7] (ring buffer wrapped) + assert sensor_states["sliding_min"][3] == pytest.approx(3.0) + assert sensor_states["sliding_max"][3] == pytest.approx(7.0) + assert sensor_states["sliding_median"][3] == pytest.approx(5.0) + assert sensor_states["sliding_moving_avg"][3] == pytest.approx(5.0) + + # Position 9: window=[5,6,7,8,9] (ring buffer wrapped) + assert sensor_states["sliding_min"][4] == pytest.approx(5.0) + assert sensor_states["sliding_max"][4] == pytest.approx(9.0) + assert sensor_states["sliding_median"][4] == pytest.approx(7.0) + assert sensor_states["sliding_moving_avg"][4] == pytest.approx(7.0) diff --git a/tests/integration/test_sensor_filters_sliding_window.py b/tests/integration/test_sensor_filters_sliding_window.py new file mode 100644 index 0000000000..b0688a6536 --- /dev/null +++ b/tests/integration/test_sensor_filters_sliding_window.py @@ -0,0 +1,395 @@ +"""Test sensor sliding window filter functionality.""" + +from __future__ import annotations + +import asyncio + +from aioesphomeapi import EntityState, SensorState +import pytest + +from .state_utils import InitialStateHelper, build_key_to_entity_mapping +from .types import APIClientConnectedFactory, RunCompiledFunction + + +@pytest.mark.asyncio +async def test_sensor_filters_sliding_window( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test that sliding window filters (min, max, median, quantile, moving_average) work correctly.""" + loop = asyncio.get_running_loop() + + # Track state changes for each sensor + sensor_states: dict[str, list[float]] = { + "min_sensor": [], + "max_sensor": [], + "median_sensor": [], + "quantile_sensor": [], + "moving_avg_sensor": [], + } + + # Futures to track when we receive expected values + min_received = loop.create_future() + max_received = loop.create_future() + median_received = loop.create_future() + quantile_received = loop.create_future() + moving_avg_received 
= loop.create_future() + + def on_state(state: EntityState) -> None: + """Track sensor state updates.""" + if not isinstance(state, SensorState): + return + + # Skip NaN values + if state.missing_state: + return + + # Get the sensor name from the key mapping + sensor_name = key_to_sensor.get(state.key) + if not sensor_name or sensor_name not in sensor_states: + return + + sensor_states[sensor_name].append(state.state) + + # Check if we received the expected final value + # After publishing 10 values [1.0, 2.0, ..., 10.0], the window has the last 5: [2, 3, 4, 5, 6] + # Filters send at position 1 and position 6 (send_every=5 means every 5th value after first) + if ( + sensor_name == "min_sensor" + and state.state == pytest.approx(2.0) + and not min_received.done() + ): + min_received.set_result(True) + elif ( + sensor_name == "max_sensor" + and state.state == pytest.approx(6.0) + and not max_received.done() + ): + max_received.set_result(True) + elif ( + sensor_name == "median_sensor" + and state.state == pytest.approx(4.0) + and not median_received.done() + ): + # Median of [2, 3, 4, 5, 6] = 4 + median_received.set_result(True) + elif ( + sensor_name == "quantile_sensor" + and state.state == pytest.approx(6.0) + and not quantile_received.done() + ): + # 90th percentile of [2, 3, 4, 5, 6] = 6 + quantile_received.set_result(True) + elif ( + sensor_name == "moving_avg_sensor" + and state.state == pytest.approx(4.0) + and not moving_avg_received.done() + ): + # Average of [2, 3, 4, 5, 6] = 4 + moving_avg_received.set_result(True) + + async with ( + run_compiled(yaml_config), + api_client_connected() as client, + ): + # Get entities first to build key mapping + entities, services = await client.list_entities_services() + + # Build key-to-sensor mapping + key_to_sensor = build_key_to_entity_mapping( + entities, + [ + "min_sensor", + "max_sensor", + "median_sensor", + "quantile_sensor", + "moving_avg_sensor", + ], + ) + + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial states to be sent before pressing button + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial states") + + # Find the publish button + publish_button = next( + (e for e in entities if "publish_values_button" in e.object_id.lower()), + None, + ) + assert publish_button is not None, "Publish Values Button not found" + + # Press the button to publish test values + client.button_command(publish_button.key) + + # Wait for all sensors to receive their final values + try: + await asyncio.wait_for( + asyncio.gather( + min_received, + max_received, + median_received, + quantile_received, + moving_avg_received, + ), + timeout=10.0, + ) + except TimeoutError: + # Provide detailed failure info + pytest.fail( + f"Timeout waiting for expected values. 
Received states:\n" + f" min: {sensor_states['min_sensor']}\n" + f" max: {sensor_states['max_sensor']}\n" + f" median: {sensor_states['median_sensor']}\n" + f" quantile: {sensor_states['quantile_sensor']}\n" + f" moving_avg: {sensor_states['moving_avg_sensor']}" + ) + + # Verify we got the expected values + # With batch_delay: 0ms, we should receive all outputs + # Filters output at positions 1 and 6 (send_every: 5) + assert len(sensor_states["min_sensor"]) == 2, ( + f"Min sensor should have 2 values, got {len(sensor_states['min_sensor'])}: {sensor_states['min_sensor']}" + ) + assert len(sensor_states["max_sensor"]) == 2, ( + f"Max sensor should have 2 values, got {len(sensor_states['max_sensor'])}: {sensor_states['max_sensor']}" + ) + assert len(sensor_states["median_sensor"]) == 2 + assert len(sensor_states["quantile_sensor"]) == 2 + assert len(sensor_states["moving_avg_sensor"]) == 2 + + # Verify the first output (after 1 value: [1]) + assert sensor_states["min_sensor"][0] == pytest.approx(1.0), ( + f"First min should be 1.0, got {sensor_states['min_sensor'][0]}" + ) + assert sensor_states["max_sensor"][0] == pytest.approx(1.0), ( + f"First max should be 1.0, got {sensor_states['max_sensor'][0]}" + ) + assert sensor_states["median_sensor"][0] == pytest.approx(1.0), ( + f"First median should be 1.0, got {sensor_states['median_sensor'][0]}" + ) + assert sensor_states["moving_avg_sensor"][0] == pytest.approx(1.0), ( + f"First moving avg should be 1.0, got {sensor_states['moving_avg_sensor'][0]}" + ) + + # Verify the second output (after 6 values, window has [2, 3, 4, 5, 6]) + assert sensor_states["min_sensor"][1] == pytest.approx(2.0), ( + f"Second min should be 2.0, got {sensor_states['min_sensor'][1]}" + ) + assert sensor_states["max_sensor"][1] == pytest.approx(6.0), ( + f"Second max should be 6.0, got {sensor_states['max_sensor'][1]}" + ) + assert sensor_states["median_sensor"][1] == pytest.approx(4.0), ( + f"Second median should be 4.0, got {sensor_states['median_sensor'][1]}" + ) + assert sensor_states["moving_avg_sensor"][1] == pytest.approx(4.0), ( + f"Second moving avg should be 4.0, got {sensor_states['moving_avg_sensor'][1]}" + ) + + +@pytest.mark.asyncio +async def test_sensor_filters_nan_handling( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test that sliding window filters handle NaN values correctly.""" + loop = asyncio.get_running_loop() + + # Track states + min_states: list[float] = [] + max_states: list[float] = [] + + # Future to track completion + filters_completed = loop.create_future() + + def on_state(state: EntityState) -> None: + """Track sensor state updates.""" + if not isinstance(state, SensorState): + return + + # Skip NaN values + if state.missing_state: + return + + sensor_name = key_to_sensor.get(state.key) + + if sensor_name == "min_nan": + min_states.append(state.state) + elif sensor_name == "max_nan": + max_states.append(state.state) + + # Check if both have received their final values + # With batch_delay: 0ms, we should receive 2 outputs each + if ( + len(min_states) >= 2 + and len(max_states) >= 2 + and not filters_completed.done() + ): + filters_completed.set_result(True) + + async with ( + run_compiled(yaml_config), + api_client_connected() as client, + ): + # Get entities first to build key mapping + entities, services = await client.list_entities_services() + + # Build key-to-sensor mapping + key_to_sensor = build_key_to_entity_mapping(entities, ["min_nan", "max_nan"]) 
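+
+        # Background for this test: ordered comparisons (<, >, <=, >=) against
+        # NaN are always false, so min/max filters cannot rely on a plain
+        # comparison to "ignore" a NaN sample; they must detect and skip it
+        # explicitly. The assertions below therefore expect NaN inputs to be
+        # excluded from the window entirely.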
+ + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial states + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial states") + + # Find the publish button + publish_button = next( + (e for e in entities if "publish_nan_values_button" in e.object_id.lower()), + None, + ) + assert publish_button is not None, "Publish NaN Values Button not found" + + # Press the button + client.button_command(publish_button.key) + + # Wait for filters to process + try: + await asyncio.wait_for(filters_completed, timeout=10.0) + except TimeoutError: + pytest.fail( + f"Timeout waiting for NaN handling. Received:\n" + f" min_states: {min_states}\n" + f" max_states: {max_states}" + ) + + # Verify NaN values were ignored + # With batch_delay: 0ms, we should receive both outputs (at positions 1 and 6) + # Position 1: window=[10], min=10, max=10 + # Position 6: window=[NaN, 5, NaN, 15, 8], ignoring NaN -> [5, 15, 8], min=5, max=15 + assert len(min_states) == 2, ( + f"Should have 2 min states, got {len(min_states)}: {min_states}" + ) + assert len(max_states) == 2, ( + f"Should have 2 max states, got {len(max_states)}: {max_states}" + ) + + # First output + assert min_states[0] == pytest.approx(10.0), ( + f"First min should be 10.0, got {min_states[0]}" + ) + assert max_states[0] == pytest.approx(10.0), ( + f"First max should be 10.0, got {max_states[0]}" + ) + + # Second output - verify NaN values were ignored + assert min_states[1] == pytest.approx(5.0), ( + f"Second min should ignore NaN and return 5.0, got {min_states[1]}" + ) + assert max_states[1] == pytest.approx(15.0), ( + f"Second max should ignore NaN and return 15.0, got {max_states[1]}" + ) + + +@pytest.mark.asyncio +async def test_sensor_filters_ring_buffer_wraparound( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test that ring buffer correctly wraps around when window fills up.""" + loop = asyncio.get_running_loop() + + min_states: list[float] = [] + + test_completed = loop.create_future() + + def on_state(state: EntityState) -> None: + """Track min sensor states.""" + if not isinstance(state, SensorState): + return + + # Skip NaN values + if state.missing_state: + return + + sensor_name = key_to_sensor.get(state.key) + + if sensor_name == "wraparound_min": + min_states.append(state.state) + # With batch_delay: 0ms, we should receive all 3 outputs + if len(min_states) >= 3 and not test_completed.done(): + test_completed.set_result(True) + + async with ( + run_compiled(yaml_config), + api_client_connected() as client, + ): + # Get entities first to build key mapping + entities, services = await client.list_entities_services() + + # Build key-to-sensor mapping + key_to_sensor = build_key_to_entity_mapping(entities, ["wraparound_min"]) + + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial state + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial state") + + # Find the publish button + publish_button = next( + (e for e in entities if 
"publish_wraparound_button" in e.object_id.lower()), + None, + ) + assert publish_button is not None, "Publish Wraparound Button not found" + + # Press the button + # Will publish: 10, 20, 30, 5, 25, 15, 40, 35, 20 + client.button_command(publish_button.key) + + # Wait for completion + try: + await asyncio.wait_for(test_completed, timeout=10.0) + except TimeoutError: + pytest.fail(f"Timeout waiting for wraparound test. Received: {min_states}") + + # Verify outputs + # With window_size=3, send_every=3, we get outputs at positions 1, 4, 7 + # Position 1: window=[10], min=10 + # Position 4: window=[20, 30, 5], min=5 + # Position 7: window=[15, 40, 35], min=15 + # With batch_delay: 0ms, we should receive all 3 outputs + assert len(min_states) == 3, ( + f"Should have 3 states, got {len(min_states)}: {min_states}" + ) + assert min_states[0] == pytest.approx(10.0), ( + f"First min should be 10.0, got {min_states[0]}" + ) + assert min_states[1] == pytest.approx(5.0), ( + f"Second min should be 5.0, got {min_states[1]}" + ) + assert min_states[2] == pytest.approx(15.0), ( + f"Third min should be 15.0, got {min_states[2]}" + ) From e9933126400193e9e3cb9ea950044c7e3058b9d3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 19 Oct 2025 09:15:47 -1000 Subject: [PATCH 151/336] [core] Fix IndexError when OTA devices cannot be resolved (#11311) --- esphome/__main__.py | 4 +- tests/unit_tests/test_main.py | 78 +++++++++++++++++++---------------- 2 files changed, 45 insertions(+), 37 deletions(-) diff --git a/esphome/__main__.py b/esphome/__main__.py index a0b7d16ae9..982e00f5e1 100644 --- a/esphome/__main__.py +++ b/esphome/__main__.py @@ -185,7 +185,9 @@ def choose_upload_log_host( else: resolved.append(device) if not resolved: - _LOGGER.error("All specified devices: %s could not be resolved.", defaults) + raise EsphomeError( + f"All specified devices {defaults} could not be resolved. Is the device connected to the network?" 
+ ) return resolved # No devices specified, show interactive chooser diff --git a/tests/unit_tests/test_main.py b/tests/unit_tests/test_main.py index 59d0433aa4..73dfe359f0 100644 --- a/tests/unit_tests/test_main.py +++ b/tests/unit_tests/test_main.py @@ -321,12 +321,14 @@ def test_choose_upload_log_host_with_serial_device_no_ports( ) -> None: """Test SERIAL device when no serial ports are found.""" setup_core() - result = choose_upload_log_host( - default="SERIAL", - check_default=None, - purpose=Purpose.UPLOADING, - ) - assert result == [] + with pytest.raises( + EsphomeError, match="All specified devices .* could not be resolved" + ): + choose_upload_log_host( + default="SERIAL", + check_default=None, + purpose=Purpose.UPLOADING, + ) assert "No serial ports found, skipping SERIAL device" in caplog.text @@ -367,12 +369,14 @@ def test_choose_upload_log_host_with_ota_device_with_api_config() -> None: """Test OTA device when API is configured (no upload without OTA in config).""" setup_core(config={CONF_API: {}}, address="192.168.1.100") - result = choose_upload_log_host( - default="OTA", - check_default=None, - purpose=Purpose.UPLOADING, - ) - assert result == [] + with pytest.raises( + EsphomeError, match="All specified devices .* could not be resolved" + ): + choose_upload_log_host( + default="OTA", + check_default=None, + purpose=Purpose.UPLOADING, + ) def test_choose_upload_log_host_with_ota_device_with_api_config_logging() -> None: @@ -405,12 +409,14 @@ def test_choose_upload_log_host_with_ota_device_no_fallback() -> None: """Test OTA device with no valid fallback options.""" setup_core() - result = choose_upload_log_host( - default="OTA", - check_default=None, - purpose=Purpose.UPLOADING, - ) - assert result == [] + with pytest.raises( + EsphomeError, match="All specified devices .* could not be resolved" + ): + choose_upload_log_host( + default="OTA", + check_default=None, + purpose=Purpose.UPLOADING, + ) @pytest.mark.usefixtures("mock_choose_prompt") @@ -615,21 +621,19 @@ def test_choose_upload_log_host_empty_defaults_list() -> None: @pytest.mark.usefixtures("mock_no_serial_ports", "mock_no_mqtt_logging") -def test_choose_upload_log_host_all_devices_unresolved( - caplog: pytest.LogCaptureFixture, -) -> None: +def test_choose_upload_log_host_all_devices_unresolved() -> None: """Test when all specified devices cannot be resolved.""" setup_core() - result = choose_upload_log_host( - default=["SERIAL", "OTA"], - check_default=None, - purpose=Purpose.UPLOADING, - ) - assert result == [] - assert ( - "All specified devices: ['SERIAL', 'OTA'] could not be resolved." in caplog.text - ) + with pytest.raises( + EsphomeError, + match=r"All specified devices \['SERIAL', 'OTA'\] could not be resolved", + ): + choose_upload_log_host( + default=["SERIAL", "OTA"], + check_default=None, + purpose=Purpose.UPLOADING, + ) @pytest.mark.usefixtures("mock_no_serial_ports", "mock_no_mqtt_logging") @@ -762,12 +766,14 @@ def test_choose_upload_log_host_no_address_with_ota_config() -> None: """Test OTA device when OTA is configured but no address is set.""" setup_core(config={CONF_OTA: {}}) - result = choose_upload_log_host( - default="OTA", - check_default=None, - purpose=Purpose.UPLOADING, - ) - assert result == [] + with pytest.raises( + EsphomeError, match="All specified devices .* could not be resolved" + ): + choose_upload_log_host( + default="OTA", + check_default=None, + purpose=Purpose.UPLOADING, + ) @dataclass From 25f3b6a959992c4664bde1603100d455359da88c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 19 Oct 2025 09:17:33 -1000 Subject: [PATCH 152/336] [mqtt] Reduce flash usage by optimizing ArduinoJson assignments (#11340) --- esphome/components/mqtt/mqtt_client.cpp | 7 +-- esphome/components/mqtt/mqtt_component.cpp | 52 ++++++++-------------- 2 files changed, 20 insertions(+), 39 deletions(-) diff --git a/esphome/components/mqtt/mqtt_client.cpp b/esphome/components/mqtt/mqtt_client.cpp index 16f54ab8a0..9055b4421e 100644 --- a/esphome/components/mqtt/mqtt_client.cpp +++ b/esphome/components/mqtt/mqtt_client.cpp @@ -140,11 +140,8 @@ void MQTTClientComponent::send_device_info_() { #endif #ifdef USE_API_NOISE - if (api::global_api_server->get_noise_ctx()->has_psk()) { - root["api_encryption"] = "Noise_NNpsk0_25519_ChaChaPoly_SHA256"; - } else { - root["api_encryption_supported"] = "Noise_NNpsk0_25519_ChaChaPoly_SHA256"; - } + root[api::global_api_server->get_noise_ctx()->has_psk() ? "api_encryption" : "api_encryption_supported"] = + "Noise_NNpsk0_25519_ChaChaPoly_SHA256"; #endif }, 2, this->discovery_info_.retain); diff --git a/esphome/components/mqtt/mqtt_component.cpp b/esphome/components/mqtt/mqtt_component.cpp index 6ceaf219ff..eb6114008a 100644 --- a/esphome/components/mqtt/mqtt_component.cpp +++ b/esphome/components/mqtt/mqtt_component.cpp @@ -85,24 +85,20 @@ bool MQTTComponent::send_discovery_() { } // Fields from EntityBase - if (this->get_entity()->has_own_name()) { - root[MQTT_NAME] = this->friendly_name(); - } else { - root[MQTT_NAME] = ""; - } + root[MQTT_NAME] = this->get_entity()->has_own_name() ? this->friendly_name() : ""; + if (this->is_disabled_by_default()) root[MQTT_ENABLED_BY_DEFAULT] = false; if (!this->get_icon().empty()) root[MQTT_ICON] = this->get_icon(); - switch (this->get_entity()->get_entity_category()) { + const auto entity_category = this->get_entity()->get_entity_category(); + switch (entity_category) { case ENTITY_CATEGORY_NONE: break; case ENTITY_CATEGORY_CONFIG: - root[MQTT_ENTITY_CATEGORY] = "config"; - break; case ENTITY_CATEGORY_DIAGNOSTIC: - root[MQTT_ENTITY_CATEGORY] = "diagnostic"; + root[MQTT_ENTITY_CATEGORY] = entity_category == ENTITY_CATEGORY_CONFIG ? "config" : "diagnostic"; break; } @@ -113,20 +109,14 @@ bool MQTTComponent::send_discovery_() { if (this->command_retain_) root[MQTT_COMMAND_RETAIN] = true; - if (this->availability_ == nullptr) { - if (!global_mqtt_client->get_availability().topic.empty()) { - root[MQTT_AVAILABILITY_TOPIC] = global_mqtt_client->get_availability().topic; - if (global_mqtt_client->get_availability().payload_available != "online") - root[MQTT_PAYLOAD_AVAILABLE] = global_mqtt_client->get_availability().payload_available; - if (global_mqtt_client->get_availability().payload_not_available != "offline") - root[MQTT_PAYLOAD_NOT_AVAILABLE] = global_mqtt_client->get_availability().payload_not_available; - } - } else if (!this->availability_->topic.empty()) { - root[MQTT_AVAILABILITY_TOPIC] = this->availability_->topic; - if (this->availability_->payload_available != "online") - root[MQTT_PAYLOAD_AVAILABLE] = this->availability_->payload_available; - if (this->availability_->payload_not_available != "offline") - root[MQTT_PAYLOAD_NOT_AVAILABLE] = this->availability_->payload_not_available; + const Availability &avail = + this->availability_ == nullptr ? 
global_mqtt_client->get_availability() : *this->availability_; + if (!avail.topic.empty()) { + root[MQTT_AVAILABILITY_TOPIC] = avail.topic; + if (avail.payload_available != "online") + root[MQTT_PAYLOAD_AVAILABLE] = avail.payload_available; + if (avail.payload_not_available != "offline") + root[MQTT_PAYLOAD_NOT_AVAILABLE] = avail.payload_not_available; } const MQTTDiscoveryInfo &discovery_info = global_mqtt_client->get_discovery_info(); @@ -145,10 +135,8 @@ bool MQTTComponent::send_discovery_() { if (discovery_info.object_id_generator == MQTT_DEVICE_NAME_OBJECT_ID_GENERATOR) root[MQTT_OBJECT_ID] = node_name + "_" + this->get_default_object_id_(); - std::string node_friendly_name = App.get_friendly_name(); - if (node_friendly_name.empty()) { - node_friendly_name = node_name; - } + const std::string &friendly_name_ref = App.get_friendly_name(); + const std::string &node_friendly_name = friendly_name_ref.empty() ? node_name : friendly_name_ref; std::string node_area = App.get_area(); JsonObject device_info = root[MQTT_DEVICE].to(); @@ -158,13 +146,9 @@ bool MQTTComponent::send_discovery_() { #ifdef ESPHOME_PROJECT_NAME device_info[MQTT_DEVICE_SW_VERSION] = ESPHOME_PROJECT_VERSION " (ESPHome " ESPHOME_VERSION ")"; const char *model = std::strchr(ESPHOME_PROJECT_NAME, '.'); - if (model == nullptr) { // must never happen but check anyway - device_info[MQTT_DEVICE_MODEL] = ESPHOME_BOARD; - device_info[MQTT_DEVICE_MANUFACTURER] = ESPHOME_PROJECT_NAME; - } else { - device_info[MQTT_DEVICE_MODEL] = model + 1; - device_info[MQTT_DEVICE_MANUFACTURER] = std::string(ESPHOME_PROJECT_NAME, model - ESPHOME_PROJECT_NAME); - } + device_info[MQTT_DEVICE_MODEL] = model == nullptr ? ESPHOME_BOARD : model + 1; + device_info[MQTT_DEVICE_MANUFACTURER] = + model == nullptr ? ESPHOME_PROJECT_NAME : std::string(ESPHOME_PROJECT_NAME, model - ESPHOME_PROJECT_NAME); #else device_info[MQTT_DEVICE_SW_VERSION] = ESPHOME_VERSION " (" + App.get_compilation_time() + ")"; device_info[MQTT_DEVICE_MODEL] = ESPHOME_BOARD; From 33fea90c19fa3e274e4253fa23bcd24ceb8c4cd3 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 19 Oct 2025 09:26:18 -1000 Subject: [PATCH 153/336] [wifi] Optimize WiFi scanning to reduce copies and heap allocations (#11323) --- esphome/components/wifi/wifi_component.cpp | 40 ++++++++++++---------- esphome/components/wifi/wifi_component.h | 2 +- 2 files changed, 23 insertions(+), 19 deletions(-) diff --git a/esphome/components/wifi/wifi_component.cpp b/esphome/components/wifi/wifi_component.cpp index 5aa2a03a14..612b11a50f 100644 --- a/esphome/components/wifi/wifi_component.cpp +++ b/esphome/components/wifi/wifi_component.cpp @@ -607,10 +607,12 @@ void WiFiComponent::check_scanning_finished() { for (auto &ap : this->sta_) { if (res.matches(ap)) { res.set_matches(true); - if (!this->has_sta_priority(res.get_bssid())) { - this->set_sta_priority(res.get_bssid(), ap.get_priority()); + // Cache priority lookup - do single search instead of 2 separate searches + const bssid_t &bssid = res.get_bssid(); + if (!this->has_sta_priority(bssid)) { + this->set_sta_priority(bssid, ap.get_priority()); } - res.set_priority(this->get_sta_priority(res.get_bssid())); + res.set_priority(this->get_sta_priority(bssid)); break; } } @@ -629,8 +631,9 @@ void WiFiComponent::check_scanning_finished() { return; } - WiFiAP connect_params; - WiFiScanResult scan_res = this->scan_result_[0]; + // Build connection params directly into selected_ap_ to avoid extra copy + const WiFiScanResult &scan_res = this->scan_result_[0]; + WiFiAP &selected = this->selected_ap_; for (auto &config : this->sta_) { // search for matching STA config, at least one will match (from checks before) if (!scan_res.matches(config)) { @@ -639,37 +642,38 @@ void WiFiComponent::check_scanning_finished() { if (config.get_hidden()) { // selected network is hidden, we use the data from the config - connect_params.set_hidden(true); - connect_params.set_ssid(config.get_ssid()); - // don't set BSSID and channel, there might be multiple hidden networks + selected.set_hidden(true); + selected.set_ssid(config.get_ssid()); + // Clear channel and BSSID for hidden networks - there might be multiple hidden networks // but we can't know which one is the correct one. Rely on probe-req with just SSID. + selected.set_channel(0); + selected.set_bssid(optional{}); } else { // selected network is visible, we use the data from the scan // limit the connect params to only connect to exactly this network // (network selection is done during scan phase). 
- connect_params.set_hidden(false); - connect_params.set_ssid(scan_res.get_ssid()); - connect_params.set_channel(scan_res.get_channel()); - connect_params.set_bssid(scan_res.get_bssid()); + selected.set_hidden(false); + selected.set_ssid(scan_res.get_ssid()); + selected.set_channel(scan_res.get_channel()); + selected.set_bssid(scan_res.get_bssid()); } // copy manual IP (if set) - connect_params.set_manual_ip(config.get_manual_ip()); + selected.set_manual_ip(config.get_manual_ip()); #ifdef USE_WIFI_WPA2_EAP // copy EAP parameters (if set) - connect_params.set_eap(config.get_eap()); + selected.set_eap(config.get_eap()); #endif // copy password (if set) - connect_params.set_password(config.get_password()); + selected.set_password(config.get_password()); break; } yield(); - this->selected_ap_ = connect_params; - this->start_connecting(connect_params, false); + this->start_connecting(this->selected_ap_, false); } void WiFiComponent::dump_config() { @@ -902,7 +906,7 @@ WiFiScanResult::WiFiScanResult(const bssid_t &bssid, std::string ssid, uint8_t c rssi_(rssi), with_auth_(with_auth), is_hidden_(is_hidden) {} -bool WiFiScanResult::matches(const WiFiAP &config) { +bool WiFiScanResult::matches(const WiFiAP &config) const { if (config.get_hidden()) { // User configured a hidden network, only match actually hidden networks // don't match SSID diff --git a/esphome/components/wifi/wifi_component.h b/esphome/components/wifi/wifi_component.h index 9d32071b2b..508024a235 100644 --- a/esphome/components/wifi/wifi_component.h +++ b/esphome/components/wifi/wifi_component.h @@ -170,7 +170,7 @@ class WiFiScanResult { public: WiFiScanResult(const bssid_t &bssid, std::string ssid, uint8_t channel, int8_t rssi, bool with_auth, bool is_hidden); - bool matches(const WiFiAP &config); + bool matches(const WiFiAP &config) const; bool get_matches() const; void set_matches(bool matches); From c9312d5c2799a213d338981527907d475bb7844a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 19 Oct 2025 09:42:17 -1000 Subject: [PATCH 154/336] [script] Fix unbounded queue growth, optimize queued mode (default max_runs=5) (#11308) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com> --- esphome/components/script/__init__.py | 17 +- esphome/components/script/script.h | 72 +++++-- tests/integration/fixtures/script_queued.yaml | 170 +++++++++++++++ tests/integration/test_script_queued.py | 203 ++++++++++++++++++ 4 files changed, 442 insertions(+), 20 deletions(-) create mode 100644 tests/integration/fixtures/script_queued.yaml create mode 100644 tests/integration/test_script_queued.py diff --git a/esphome/components/script/__init__.py b/esphome/components/script/__init__.py index e8a8aa5671..8d69981db0 100644 --- a/esphome/components/script/__init__.py +++ b/esphome/components/script/__init__.py @@ -45,13 +45,26 @@ def get_script(script_id): def check_max_runs(value): + # Set default for queued mode to prevent unbounded queue growth + if CONF_MAX_RUNS not in value and value[CONF_MODE] == CONF_QUEUED: + value[CONF_MAX_RUNS] = 5 + if CONF_MAX_RUNS not in value: return value + if value[CONF_MODE] not in [CONF_QUEUED, CONF_PARALLEL]: raise cv.Invalid( - "The option 'max_runs' is only valid in 'queue' and 'parallel' mode.", + "The option 'max_runs' is only valid in 'queued' and 'parallel' mode.", path=[CONF_MAX_RUNS], ) + + # Queued mode must have bounded queue (min 1), parallel mode can be unlimited (0) + if value[CONF_MODE] == CONF_QUEUED and value[CONF_MAX_RUNS] < 1: + raise cv.Invalid( + "The option 'max_runs' must be at least 1 for queued mode.", + path=[CONF_MAX_RUNS], + ) + return value @@ -106,7 +119,7 @@ CONFIG_SCHEMA = automation.validate_automation( cv.Optional(CONF_MODE, default=CONF_SINGLE): cv.one_of( *SCRIPT_MODES, lower=True ), - cv.Optional(CONF_MAX_RUNS): cv.positive_int, + cv.Optional(CONF_MAX_RUNS): cv.int_range(min=0, max=100), cv.Optional(CONF_PARAMETERS, default={}): cv.Schema( { validate_parameter_name: validate_parameter_type, diff --git a/esphome/components/script/script.h b/esphome/components/script/script.h index b87402f52e..58fb67a3ea 100644 --- a/esphome/components/script/script.h +++ b/esphome/components/script/script.h @@ -1,10 +1,11 @@ #pragma once +#include +#include #include "esphome/core/automation.h" #include "esphome/core/component.h" +#include "esphome/core/helpers.h" #include "esphome/core/log.h" - -#include namespace esphome { namespace script { @@ -96,23 +97,41 @@ template class RestartScript : public Script { /** A script type that queues new instances that are created. * * Only one instance of the script can be active at a time. 
+ * + * Ring buffer implementation: + * - num_queued_ tracks the number of queued (waiting) instances, NOT including the currently running one + * - queue_front_ points to the next item to execute (read position) + * - Buffer size is max_runs_ - 1 (max total instances minus the running one) + * - Write position is calculated as: (queue_front_ + num_queued_) % (max_runs_ - 1) + * - When an item finishes, queue_front_ advances: (queue_front_ + 1) % (max_runs_ - 1) + * - First execute() runs immediately without queuing (num_queued_ stays 0) + * - Subsequent executes while running are queued starting at position 0 + * - Maximum total instances = max_runs_ (includes 1 running + (max_runs_ - 1) queued) */ template class QueueingScript : public Script, public Component { public: void execute(Ts... x) override { - if (this->is_action_running() || this->num_runs_ > 0) { - // num_runs_ is the number of *queued* instances, so total number of instances is - // num_runs_ + 1 - if (this->max_runs_ != 0 && this->num_runs_ + 1 >= this->max_runs_) { - this->esp_logw_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' maximum number of queued runs exceeded!"), + if (this->is_action_running() || this->num_queued_ > 0) { + // num_queued_ is the number of *queued* instances (waiting, not including currently running) + // max_runs_ is the maximum *total* instances (running + queued) + // So we reject when num_queued_ + 1 >= max_runs_ (queued + running >= max) + if (this->num_queued_ + 1 >= this->max_runs_) { + this->esp_logw_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' max instances (running + queued) reached!"), LOG_STR_ARG(this->name_)); return; } + // Initialize queue on first queued item (after capacity check) + this->lazy_init_queue_(); + this->esp_logd_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' queueing new instance (mode: queued)"), LOG_STR_ARG(this->name_)); - this->num_runs_++; - this->var_queue_.push(std::make_tuple(x...)); + // Ring buffer: write to (queue_front_ + num_queued_) % queue_capacity + const size_t queue_capacity = static_cast(this->max_runs_ - 1); + size_t write_pos = (this->queue_front_ + this->num_queued_) % queue_capacity; + // Use std::make_unique to replace the unique_ptr + this->var_queue_[write_pos] = std::make_unique>(x...); + this->num_queued_++; return; } @@ -122,29 +141,46 @@ template class QueueingScript : public Script, public Com } void stop() override { - this->num_runs_ = 0; + // Clear all queued items to free memory immediately + // Resetting the array automatically destroys all unique_ptrs and their contents + this->var_queue_.reset(); + this->num_queued_ = 0; + this->queue_front_ = 0; Script::stop(); } void loop() override { - if (this->num_runs_ != 0 && !this->is_action_running()) { - this->num_runs_--; - auto &vars = this->var_queue_.front(); - this->var_queue_.pop(); - this->trigger_tuple_(vars, typename gens::type()); + if (this->num_queued_ != 0 && !this->is_action_running()) { + // Dequeue: decrement count, move tuple out (frees slot), advance read position + this->num_queued_--; + const size_t queue_capacity = static_cast(this->max_runs_ - 1); + auto tuple_ptr = std::move(this->var_queue_[this->queue_front_]); + this->queue_front_ = (this->queue_front_ + 1) % queue_capacity; + this->trigger_tuple_(*tuple_ptr, typename gens::type()); } } void set_max_runs(int max_runs) { max_runs_ = max_runs; } protected: + // Lazy init queue on first use - avoids setup() ordering issues and saves memory + // if script is never executed during this boot cycle + inline void lazy_init_queue_() 
{ + if (!this->var_queue_) { + // Allocate array of max_runs_ - 1 slots for queued items (running item is separate) + // unique_ptr array is zero-initialized, so all slots start as nullptr + this->var_queue_ = std::make_unique>[]>(this->max_runs_ - 1); + } + } + template void trigger_tuple_(const std::tuple &tuple, seq /*unused*/) { this->trigger(std::get(tuple)...); } - int num_runs_ = 0; - int max_runs_ = 0; - std::queue> var_queue_; + int num_queued_ = 0; // Number of queued instances (not including currently running) + int max_runs_ = 0; // Maximum total instances (running + queued) + size_t queue_front_ = 0; // Ring buffer read position (next item to execute) + std::unique_ptr>[]> var_queue_; // Ring buffer of queued parameters }; /** A script type that executes new instances in parallel. diff --git a/tests/integration/fixtures/script_queued.yaml b/tests/integration/fixtures/script_queued.yaml new file mode 100644 index 0000000000..996dd6436f --- /dev/null +++ b/tests/integration/fixtures/script_queued.yaml @@ -0,0 +1,170 @@ +esphome: + name: test-script-queued + +host: +api: + actions: + # Test 1: Queue depth with default max_runs=5 + - action: test_queue_depth + then: + - logger.log: "=== TEST 1: Queue depth (max_runs=5 means 5 total, reject 6-7) ===" + - script.execute: + id: queue_depth_script + value: 1 + - script.execute: + id: queue_depth_script + value: 2 + - script.execute: + id: queue_depth_script + value: 3 + - script.execute: + id: queue_depth_script + value: 4 + - script.execute: + id: queue_depth_script + value: 5 + - script.execute: + id: queue_depth_script + value: 6 + - script.execute: + id: queue_depth_script + value: 7 + + # Test 2: Ring buffer wrap test + - action: test_ring_buffer + then: + - logger.log: "=== TEST 2: Ring buffer wrap (should process A, B, C in order) ===" + - script.execute: + id: wrap_script + msg: "A" + - script.execute: + id: wrap_script + msg: "B" + - script.execute: + id: wrap_script + msg: "C" + + # Test 3: Stop clears queue + - action: test_stop_clears + then: + - logger.log: "=== TEST 3: Stop clears queue (should only see 1, then 'STOPPED') ===" + - script.execute: + id: stop_script + num: 1 + - script.execute: + id: stop_script + num: 2 + - script.execute: + id: stop_script + num: 3 + - delay: 50ms + - logger.log: "STOPPING script now" + - script.stop: stop_script + + # Test 4: Verify rejection (max_runs=3) + - action: test_rejection + then: + - logger.log: "=== TEST 4: Verify rejection (max_runs=3 means 3 total, reject 4-8) ===" + - script.execute: + id: rejection_script + val: 1 + - script.execute: + id: rejection_script + val: 2 + - script.execute: + id: rejection_script + val: 3 + - script.execute: + id: rejection_script + val: 4 + - script.execute: + id: rejection_script + val: 5 + - script.execute: + id: rejection_script + val: 6 + - script.execute: + id: rejection_script + val: 7 + - script.execute: + id: rejection_script + val: 8 + + # Test 5: No parameters test + - action: test_no_params + then: + - logger.log: "=== TEST 5: No params (should process 3 times) ===" + - script.execute: no_params_script + - script.execute: no_params_script + - script.execute: no_params_script + +logger: + level: DEBUG + +script: + # Test script 1: Queue depth test (default max_runs=5) + - id: queue_depth_script + mode: queued + parameters: + value: int + then: + - logger.log: + format: "Queue test: START item %d" + args: ['value'] + - delay: 100ms + - logger.log: + format: "Queue test: END item %d" + args: ['value'] + + # Test script 2: Ring buffer 
wrap test (max_runs=3) + - id: wrap_script + mode: queued + max_runs: 3 + parameters: + msg: string + then: + - logger.log: + format: "Ring buffer: START '%s'" + args: ['msg.c_str()'] + - delay: 50ms + - logger.log: + format: "Ring buffer: END '%s'" + args: ['msg.c_str()'] + + # Test script 3: Stop test + - id: stop_script + mode: queued + max_runs: 5 + parameters: + num: int + then: + - logger.log: + format: "Stop test: START %d" + args: ['num'] + - delay: 100ms + - logger.log: + format: "Stop test: END %d" + args: ['num'] + + # Test script 4: Rejection test (max_runs=3) + - id: rejection_script + mode: queued + max_runs: 3 + parameters: + val: int + then: + - logger.log: + format: "Rejection test: START %d" + args: ['val'] + - delay: 200ms + - logger.log: + format: "Rejection test: END %d" + args: ['val'] + + # Test script 5: No parameters + - id: no_params_script + mode: queued + then: + - logger.log: "No params: START" + - delay: 50ms + - logger.log: "No params: END" diff --git a/tests/integration/test_script_queued.py b/tests/integration/test_script_queued.py new file mode 100644 index 0000000000..ce1c25b649 --- /dev/null +++ b/tests/integration/test_script_queued.py @@ -0,0 +1,203 @@ +"""Test ESPHome queued script functionality.""" + +from __future__ import annotations + +import asyncio +import re + +import pytest + +from .types import APIClientConnectedFactory, RunCompiledFunction + + +@pytest.mark.asyncio +async def test_script_queued( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test comprehensive queued script functionality.""" + loop = asyncio.get_running_loop() + + # Track all test results + test_results = { + "queue_depth": {"processed": [], "rejections": 0}, + "ring_buffer": {"start_order": [], "end_order": []}, + "stop": {"processed": [], "stop_logged": False}, + "rejection": {"processed": [], "rejections": 0}, + "no_params": {"executions": 0}, + } + + # Patterns for Test 1: Queue depth + queue_start = re.compile(r"Queue test: START item (\d+)") + queue_end = re.compile(r"Queue test: END item (\d+)") + queue_reject = re.compile(r"Script 'queue_depth_script' max instances") + + # Patterns for Test 2: Ring buffer + ring_start = re.compile(r"Ring buffer: START '([A-Z])'") + ring_end = re.compile(r"Ring buffer: END '([A-Z])'") + + # Patterns for Test 3: Stop + stop_start = re.compile(r"Stop test: START (\d+)") + stop_log = re.compile(r"STOPPING script now") + + # Patterns for Test 4: Rejection + reject_start = re.compile(r"Rejection test: START (\d+)") + reject_end = re.compile(r"Rejection test: END (\d+)") + reject_reject = re.compile(r"Script 'rejection_script' max instances") + + # Patterns for Test 5: No params + no_params_end = re.compile(r"No params: END") + + # Test completion futures + test1_complete = loop.create_future() + test2_complete = loop.create_future() + test3_complete = loop.create_future() + test4_complete = loop.create_future() + test5_complete = loop.create_future() + + def check_output(line: str) -> None: + """Check log output for all test messages.""" + # Test 1: Queue depth + if match := queue_start.search(line): + item = int(match.group(1)) + if item not in test_results["queue_depth"]["processed"]: + test_results["queue_depth"]["processed"].append(item) + + if match := queue_end.search(line): + item = int(match.group(1)) + if item == 5 and not test1_complete.done(): + test1_complete.set_result(True) + + if queue_reject.search(line): + 
test_results["queue_depth"]["rejections"] += 1 + + # Test 2: Ring buffer + if match := ring_start.search(line): + msg = match.group(1) + test_results["ring_buffer"]["start_order"].append(msg) + + if match := ring_end.search(line): + msg = match.group(1) + test_results["ring_buffer"]["end_order"].append(msg) + if ( + len(test_results["ring_buffer"]["end_order"]) == 3 + and not test2_complete.done() + ): + test2_complete.set_result(True) + + # Test 3: Stop + if match := stop_start.search(line): + item = int(match.group(1)) + if item not in test_results["stop"]["processed"]: + test_results["stop"]["processed"].append(item) + + if stop_log.search(line): + test_results["stop"]["stop_logged"] = True + # Give time for any queued items to be cleared + if not test3_complete.done(): + loop.call_later( + 0.3, + lambda: test3_complete.set_result(True) + if not test3_complete.done() + else None, + ) + + # Test 4: Rejection + if match := reject_start.search(line): + item = int(match.group(1)) + if item not in test_results["rejection"]["processed"]: + test_results["rejection"]["processed"].append(item) + + if match := reject_end.search(line): + item = int(match.group(1)) + if item == 3 and not test4_complete.done(): + test4_complete.set_result(True) + + if reject_reject.search(line): + test_results["rejection"]["rejections"] += 1 + + # Test 5: No params + if no_params_end.search(line): + test_results["no_params"]["executions"] += 1 + if ( + test_results["no_params"]["executions"] == 3 + and not test5_complete.done() + ): + test5_complete.set_result(True) + + async with ( + run_compiled(yaml_config, line_callback=check_output), + api_client_connected() as client, + ): + # Get services + _, services = await client.list_entities_services() + + # Test 1: Queue depth limit + test_service = next((s for s in services if s.name == "test_queue_depth"), None) + assert test_service is not None, "test_queue_depth service not found" + client.execute_service(test_service, {}) + await asyncio.wait_for(test1_complete, timeout=2.0) + await asyncio.sleep(0.1) # Give time for rejections + + # Verify Test 1 + assert sorted(test_results["queue_depth"]["processed"]) == [1, 2, 3, 4, 5], ( + f"Test 1: Expected to process items 1-5 (max_runs=5 means 5 total), got {sorted(test_results['queue_depth']['processed'])}" + ) + assert test_results["queue_depth"]["rejections"] >= 2, ( + "Test 1: Expected at least 2 rejection warnings (items 6-7 should be rejected)" + ) + + # Test 2: Ring buffer order + test_service = next((s for s in services if s.name == "test_ring_buffer"), None) + assert test_service is not None, "test_ring_buffer service not found" + client.execute_service(test_service, {}) + await asyncio.wait_for(test2_complete, timeout=2.0) + + # Verify Test 2 + assert test_results["ring_buffer"]["start_order"] == ["A", "B", "C"], ( + f"Test 2: Expected start order [A, B, C], got {test_results['ring_buffer']['start_order']}" + ) + assert test_results["ring_buffer"]["end_order"] == ["A", "B", "C"], ( + f"Test 2: Expected end order [A, B, C], got {test_results['ring_buffer']['end_order']}" + ) + + # Test 3: Stop clears queue + test_service = next((s for s in services if s.name == "test_stop_clears"), None) + assert test_service is not None, "test_stop_clears service not found" + client.execute_service(test_service, {}) + await asyncio.wait_for(test3_complete, timeout=2.0) + + # Verify Test 3 + assert test_results["stop"]["stop_logged"], ( + "Test 3: Stop command was not logged" + ) + assert test_results["stop"]["processed"] == [1], 
( + f"Test 3: Expected only item 1 to process, got {test_results['stop']['processed']}" + ) + + # Test 4: Rejection enforcement (max_runs=3) + test_service = next((s for s in services if s.name == "test_rejection"), None) + assert test_service is not None, "test_rejection service not found" + client.execute_service(test_service, {}) + await asyncio.wait_for(test4_complete, timeout=2.0) + await asyncio.sleep(0.1) # Give time for rejections + + # Verify Test 4 + assert sorted(test_results["rejection"]["processed"]) == [1, 2, 3], ( + f"Test 4: Expected to process items 1-3 (max_runs=3 means 3 total), got {sorted(test_results['rejection']['processed'])}" + ) + assert test_results["rejection"]["rejections"] == 5, ( + f"Test 4: Expected 5 rejections (items 4-8), got {test_results['rejection']['rejections']}" + ) + + # Test 5: No parameters + test_service = next((s for s in services if s.name == "test_no_params"), None) + assert test_service is not None, "test_no_params service not found" + client.execute_service(test_service, {}) + await asyncio.wait_for(test5_complete, timeout=2.0) + + # Verify Test 5 + assert test_results["no_params"]["executions"] == 3, ( + f"Test 5: Expected 3 executions, got {test_results['no_params']['executions']}" + ) From b0ea3f57de61f7761f1bdca274b631be4d369500 Mon Sep 17 00:00:00 2001 From: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com> Date: Sun, 19 Oct 2025 15:49:05 -0400 Subject: [PATCH 155/336] [esp32] Fix OTA rollback (#11300) Co-authored-by: J. Nick Koston --- esphome/components/esp32/core.cpp | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/esphome/components/esp32/core.cpp b/esphome/components/esp32/core.cpp index f3bdfea2a0..3427c96e70 100644 --- a/esphome/components/esp32/core.cpp +++ b/esphome/components/esp32/core.cpp @@ -6,6 +6,7 @@ #include #include #include +#include #include #include #include @@ -52,6 +53,16 @@ void arch_init() { disableCore1WDT(); #endif #endif + + // If the bootloader was compiled with CONFIG_BOOTLOADER_APP_ROLLBACK_ENABLE the current + // partition will get rolled back unless it is marked as valid. + esp_ota_img_states_t state; + const esp_partition_t *running = esp_ota_get_running_partition(); + if (esp_ota_get_state_partition(running, &state) == ESP_OK) { + if (state == ESP_OTA_IMG_PENDING_VERIFY) { + esp_ota_mark_app_valid_cancel_rollback(); + } + } } void IRAM_ATTR HOT arch_feed_wdt() { esp_task_wdt_reset(); } From 3bdd351d49b399712f701790632968f2931aecb4 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 19 Oct 2025 09:52:33 -1000 Subject: [PATCH 156/336] [wifi] Convert fast_connect to compile-time define, save 156-1024 bytes flash (#11328) --- esphome/components/wifi/__init__.py | 3 +- esphome/components/wifi/wifi_component.cpp | 88 ++++++++++++---------- esphome/components/wifi/wifi_component.h | 10 ++- esphome/core/defines.h | 1 + tests/components/wifi/common-eap.yaml | 1 + 5 files changed, 60 insertions(+), 43 deletions(-) diff --git a/esphome/components/wifi/__init__.py b/esphome/components/wifi/__init__.py index 1f742dc1a8..494470cb48 100644 --- a/esphome/components/wifi/__init__.py +++ b/esphome/components/wifi/__init__.py @@ -407,7 +407,8 @@ async def to_code(config): cg.add(var.set_reboot_timeout(config[CONF_REBOOT_TIMEOUT])) cg.add(var.set_power_save_mode(config[CONF_POWER_SAVE_MODE])) - cg.add(var.set_fast_connect(config[CONF_FAST_CONNECT])) + if config[CONF_FAST_CONNECT]: + cg.add_define("USE_WIFI_FAST_CONNECT") cg.add(var.set_passive_scan(config[CONF_PASSIVE_SCAN])) if CONF_OUTPUT_POWER in config: cg.add(var.set_output_power(config[CONF_OUTPUT_POWER])) diff --git a/esphome/components/wifi/wifi_component.cpp b/esphome/components/wifi/wifi_component.cpp index 612b11a50f..c89384d742 100644 --- a/esphome/components/wifi/wifi_component.cpp +++ b/esphome/components/wifi/wifi_component.cpp @@ -84,9 +84,9 @@ void WiFiComponent::start() { uint32_t hash = this->has_sta() ? fnv1_hash(App.get_compilation_time()) : 88491487UL; this->pref_ = global_preferences->make_preference(hash, true); - if (this->fast_connect_) { - this->fast_connect_pref_ = global_preferences->make_preference(hash + 1, false); - } +#ifdef USE_WIFI_FAST_CONNECT + this->fast_connect_pref_ = global_preferences->make_preference(hash + 1, false); +#endif SavedWifiSettings save{}; if (this->pref_.load(&save)) { @@ -108,16 +108,16 @@ void WiFiComponent::start() { ESP_LOGV(TAG, "Setting Power Save Option failed"); } - if (this->fast_connect_) { - this->trying_loaded_ap_ = this->load_fast_connect_settings_(); - if (!this->trying_loaded_ap_) { - this->ap_index_ = 0; - this->selected_ap_ = this->sta_[this->ap_index_]; - } - this->start_connecting(this->selected_ap_, false); - } else { - this->start_scanning(); +#ifdef USE_WIFI_FAST_CONNECT + this->trying_loaded_ap_ = this->load_fast_connect_settings_(); + if (!this->trying_loaded_ap_) { + this->ap_index_ = 0; + this->selected_ap_ = this->sta_[this->ap_index_]; } + this->start_connecting(this->selected_ap_, false); +#else + this->start_scanning(); +#endif #ifdef USE_WIFI_AP } else if (this->has_ap()) { this->setup_ap_config_(); @@ -168,13 +168,20 @@ void WiFiComponent::loop() { case WIFI_COMPONENT_STATE_COOLDOWN: { this->status_set_warning(LOG_STR("waiting to reconnect")); if (millis() - this->action_started_ > 5000) { - if (this->fast_connect_ || this->retry_hidden_) { +#ifdef USE_WIFI_FAST_CONNECT + // NOTE: This check may not make sense here as it could interfere with AP cycling + if (!this->selected_ap_.get_bssid().has_value()) + this->selected_ap_ = this->sta_[0]; + this->start_connecting(this->selected_ap_, false); +#else + if (this->retry_hidden_) { if (!this->selected_ap_.get_bssid().has_value()) this->selected_ap_ = this->sta_[0]; this->start_connecting(this->selected_ap_, false); } else { this->start_scanning(); } +#endif } break; } @@ -244,7 +251,6 @@ WiFiComponent::WiFiComponent() { global_wifi_component = this; } bool WiFiComponent::has_ap() const { return this->has_ap_; } bool WiFiComponent::has_sta() const { return !this->sta_.empty(); } 
-void WiFiComponent::set_fast_connect(bool fast_connect) { this->fast_connect_ = fast_connect; } #ifdef USE_WIFI_11KV_SUPPORT void WiFiComponent::set_btm(bool btm) { this->btm_ = btm; } void WiFiComponent::set_rrm(bool rrm) { this->rrm_ = rrm; } @@ -723,9 +729,9 @@ void WiFiComponent::check_connecting_finished() { this->scan_result_.shrink_to_fit(); } - if (this->fast_connect_) { - this->save_fast_connect_settings_(); - } +#ifdef USE_WIFI_FAST_CONNECT + this->save_fast_connect_settings_(); +#endif return; } @@ -773,31 +779,31 @@ void WiFiComponent::retry_connect() { delay(10); if (!this->is_captive_portal_active_() && !this->is_esp32_improv_active_() && (this->num_retried_ > 3 || this->error_from_callback_)) { - if (this->fast_connect_) { - if (this->trying_loaded_ap_) { - this->trying_loaded_ap_ = false; - this->ap_index_ = 0; // Retry from the first configured AP - } else if (this->ap_index_ >= this->sta_.size() - 1) { - ESP_LOGW(TAG, "No more APs to try"); - this->ap_index_ = 0; - this->restart_adapter(); - } else { - // Try next AP - this->ap_index_++; - } - this->num_retried_ = 0; - this->selected_ap_ = this->sta_[this->ap_index_]; +#ifdef USE_WIFI_FAST_CONNECT + if (this->trying_loaded_ap_) { + this->trying_loaded_ap_ = false; + this->ap_index_ = 0; // Retry from the first configured AP + } else if (this->ap_index_ >= this->sta_.size() - 1) { + ESP_LOGW(TAG, "No more APs to try"); + this->ap_index_ = 0; + this->restart_adapter(); } else { - if (this->num_retried_ > 5) { - // If retry failed for more than 5 times, let's restart STA - this->restart_adapter(); - } else { - // Try hidden networks after 3 failed retries - ESP_LOGD(TAG, "Retrying with hidden networks"); - this->retry_hidden_ = true; - this->num_retried_++; - } + // Try next AP + this->ap_index_++; } + this->num_retried_ = 0; + this->selected_ap_ = this->sta_[this->ap_index_]; +#else + if (this->num_retried_ > 5) { + // If retry failed for more than 5 times, let's restart STA + this->restart_adapter(); + } else { + // Try hidden networks after 3 failed retries + ESP_LOGD(TAG, "Retrying with hidden networks"); + this->retry_hidden_ = true; + this->num_retried_++; + } +#endif } else { this->num_retried_++; } @@ -843,6 +849,7 @@ bool WiFiComponent::is_esp32_improv_active_() { #endif } +#ifdef USE_WIFI_FAST_CONNECT bool WiFiComponent::load_fast_connect_settings_() { SavedWifiFastConnectSettings fast_connect_save{}; @@ -877,6 +884,7 @@ void WiFiComponent::save_fast_connect_settings_() { ESP_LOGD(TAG, "Saved fast_connect settings"); } } +#endif void WiFiAP::set_ssid(const std::string &ssid) { this->ssid_ = ssid; } void WiFiAP::set_bssid(bssid_t bssid) { this->bssid_ = bssid; } diff --git a/esphome/components/wifi/wifi_component.h b/esphome/components/wifi/wifi_component.h index 508024a235..10aa82a065 100644 --- a/esphome/components/wifi/wifi_component.h +++ b/esphome/components/wifi/wifi_component.h @@ -240,7 +240,6 @@ class WiFiComponent : public Component { void start_scanning(); void check_scanning_finished(); void start_connecting(const WiFiAP &ap, bool two); - void set_fast_connect(bool fast_connect); void set_ap_timeout(uint32_t ap_timeout) { ap_timeout_ = ap_timeout; } void check_connecting_finished(); @@ -364,8 +363,10 @@ class WiFiComponent : public Component { bool is_captive_portal_active_(); bool is_esp32_improv_active_(); +#ifdef USE_WIFI_FAST_CONNECT bool load_fast_connect_settings_(); void save_fast_connect_settings_(); +#endif #ifdef USE_ESP8266 static void wifi_event_callback(System_Event_t *event); @@ -399,7 
+400,9 @@ class WiFiComponent : public Component { WiFiAP ap_; optional output_power_; ESPPreferenceObject pref_; +#ifdef USE_WIFI_FAST_CONNECT ESPPreferenceObject fast_connect_pref_; +#endif // Group all 32-bit integers together uint32_t action_started_; @@ -411,14 +414,17 @@ class WiFiComponent : public Component { WiFiComponentState state_{WIFI_COMPONENT_STATE_OFF}; WiFiPowerSaveMode power_save_{WIFI_POWER_SAVE_NONE}; uint8_t num_retried_{0}; +#ifdef USE_WIFI_FAST_CONNECT uint8_t ap_index_{0}; +#endif #if USE_NETWORK_IPV6 uint8_t num_ipv6_addresses_{0}; #endif /* USE_NETWORK_IPV6 */ // Group all boolean values together - bool fast_connect_{false}; +#ifdef USE_WIFI_FAST_CONNECT bool trying_loaded_ap_{false}; +#endif bool retry_hidden_{false}; bool has_ap_{false}; bool handled_connected_state_{false}; diff --git a/esphome/core/defines.h b/esphome/core/defines.h index 1afb296fc0..b1bd7f92d7 100644 --- a/esphome/core/defines.h +++ b/esphome/core/defines.h @@ -199,6 +199,7 @@ #define USE_WEBSERVER_PORT 80 // NOLINT #define USE_WEBSERVER_SORTING #define USE_WIFI_11KV_SUPPORT +#define USE_WIFI_FAST_CONNECT #define USB_HOST_MAX_REQUESTS 16 #ifdef USE_ARDUINO diff --git a/tests/components/wifi/common-eap.yaml b/tests/components/wifi/common-eap.yaml index 779cd6b49a..52319fa5a1 100644 --- a/tests/components/wifi/common-eap.yaml +++ b/tests/components/wifi/common-eap.yaml @@ -1,4 +1,5 @@ wifi: + fast_connect: true networks: - ssid: MySSID eap: From 70cb1793f38111e3ce81c492c9708f5928f37409 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 19 Oct 2025 09:53:05 -1000 Subject: [PATCH 157/336] [wifi] Optimize WiFi scan results with in-place construction (#11330) --- esphome/components/wifi/wifi_component_esp8266.cpp | 8 ++++---- esphome/components/wifi/wifi_component_esp_idf.cpp | 4 ++-- esphome/components/wifi/wifi_component_libretiny.cpp | 6 +++--- esphome/core/helpers.h | 8 ++++---- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/esphome/components/wifi/wifi_component_esp8266.cpp b/esphome/components/wifi/wifi_component_esp8266.cpp index 59909b2cb5..4e17c42f41 100644 --- a/esphome/components/wifi/wifi_component_esp8266.cpp +++ b/esphome/components/wifi/wifi_component_esp8266.cpp @@ -706,10 +706,10 @@ void WiFiComponent::wifi_scan_done_callback_(void *arg, STATUS status) { this->scan_result_.init(count); for (bss_info *it = head; it != nullptr; it = STAILQ_NEXT(it, next)) { - WiFiScanResult res({it->bssid[0], it->bssid[1], it->bssid[2], it->bssid[3], it->bssid[4], it->bssid[5]}, - std::string(reinterpret_cast(it->ssid), it->ssid_len), it->channel, it->rssi, - it->authmode != AUTH_OPEN, it->is_hidden != 0); - this->scan_result_.push_back(res); + this->scan_result_.emplace_back( + bssid_t{it->bssid[0], it->bssid[1], it->bssid[2], it->bssid[3], it->bssid[4], it->bssid[5]}, + std::string(reinterpret_cast(it->ssid), it->ssid_len), it->channel, it->rssi, it->authmode != AUTH_OPEN, + it->is_hidden != 0); } this->scan_done_ = true; } diff --git a/esphome/components/wifi/wifi_component_esp_idf.cpp b/esphome/components/wifi/wifi_component_esp_idf.cpp index ce1cc961d0..08ecba3598 100644 --- a/esphome/components/wifi/wifi_component_esp_idf.cpp +++ b/esphome/components/wifi/wifi_component_esp_idf.cpp @@ -789,8 +789,8 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) { bssid_t bssid; std::copy(record.bssid, record.bssid + 6, bssid.begin()); std::string ssid(reinterpret_cast(record.ssid)); - WiFiScanResult result(bssid, ssid, record.primary, record.rssi, record.authmode != 
WIFI_AUTH_OPEN, ssid.empty()); - scan_result_.push_back(result); + scan_result_.emplace_back(bssid, ssid, record.primary, record.rssi, record.authmode != WIFI_AUTH_OPEN, + ssid.empty()); } } else if (data->event_base == WIFI_EVENT && data->event_id == WIFI_EVENT_AP_START) { diff --git a/esphome/components/wifi/wifi_component_libretiny.cpp b/esphome/components/wifi/wifi_component_libretiny.cpp index cb179d9022..45e2fba82a 100644 --- a/esphome/components/wifi/wifi_component_libretiny.cpp +++ b/esphome/components/wifi/wifi_component_libretiny.cpp @@ -419,9 +419,9 @@ void WiFiComponent::wifi_scan_done_callback_() { uint8_t *bssid = WiFi.BSSID(i); int32_t channel = WiFi.channel(i); - WiFiScanResult scan({bssid[0], bssid[1], bssid[2], bssid[3], bssid[4], bssid[5]}, std::string(ssid.c_str()), - channel, rssi, authmode != WIFI_AUTH_OPEN, ssid.length() == 0); - this->scan_result_.push_back(scan); + this->scan_result_.emplace_back(bssid_t{bssid[0], bssid[1], bssid[2], bssid[3], bssid[4], bssid[5]}, + std::string(ssid.c_str()), channel, rssi, authmode != WIFI_AUTH_OPEN, + ssid.length() == 0); } WiFi.scanDelete(); this->scan_done_ = true; diff --git a/esphome/core/helpers.h b/esphome/core/helpers.h index 326718e974..37a64d46b2 100644 --- a/esphome/core/helpers.h +++ b/esphome/core/helpers.h @@ -281,13 +281,13 @@ template class FixedVector { } } - /// Emplace element without bounds checking - constructs in-place + /// Emplace element without bounds checking - constructs in-place with arguments /// Caller must ensure sufficient capacity was allocated via init() /// Returns reference to the newly constructed element /// NOTE: Caller MUST ensure size_ < capacity_ before calling - T &emplace_back() { - // Use placement new to default-construct the object in pre-allocated memory - new (&data_[size_]) T(); + template T &emplace_back(Args &&...args) { + // Use placement new to construct the object in pre-allocated memory + new (&data_[size_]) T(std::forward(args)...); size_++; return data_[size_ - 1]; } From 9c235b4140019387c59c0eaa8d06effff5826f46 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 15 Oct 2025 23:45:42 -1000 Subject: [PATCH 158/336] [datetime] Fix DateTimeStateTrigger compilation when time component is not used (#11287) --- esphome/components/datetime/datetime_base.h | 2 -- 1 file changed, 2 deletions(-) diff --git a/esphome/components/datetime/datetime_base.h b/esphome/components/datetime/datetime_base.h index b7645f5539..b5f54ac96f 100644 --- a/esphome/components/datetime/datetime_base.h +++ b/esphome/components/datetime/datetime_base.h @@ -30,14 +30,12 @@ class DateTimeBase : public EntityBase { #endif }; -#ifdef USE_TIME class DateTimeStateTrigger : public Trigger { public: explicit DateTimeStateTrigger(DateTimeBase *parent) { parent->add_on_state_callback([this, parent]() { this->trigger(parent->state_as_esptime()); }); } }; -#endif } // namespace datetime } // namespace esphome From 0d612fecfc79fd629c6aca86fc7714430aee6031 Mon Sep 17 00:00:00 2001 From: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com> Date: Thu, 16 Oct 2025 11:28:52 -0400 Subject: [PATCH 159/336] [core] Add ESP32 ROM functions to reserved ids (#11293) --- esphome/config_validation.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/esphome/config_validation.py b/esphome/config_validation.py index 7aaba886e3..738c3dfe50 100644 --- a/esphome/config_validation.py +++ b/esphome/config_validation.py @@ -244,6 +244,20 @@ RESERVED_IDS = [ "uart0", "uart1", "uart2", + # ESP32 ROM functions + "crc16_be", + "crc16_le", + "crc32_be", + "crc32_le", + "crc8_be", + "crc8_le", + "dbg_state", + "debug_timer", + "one_bits", + "recv_packet", + "send_packet", + "check_pos", + "software_reset", ] From bb24ad4a30aa43331bed16308355d64d679e9444 Mon Sep 17 00:00:00 2001 From: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com> Date: Thu, 16 Oct 2025 11:29:05 -0400 Subject: [PATCH 160/336] [htu21d] Revert register address change (#11291) --- esphome/components/htu21d/htu21d.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/esphome/components/htu21d/htu21d.cpp b/esphome/components/htu21d/htu21d.cpp index a7aae16f17..c5d91d3dd0 100644 --- a/esphome/components/htu21d/htu21d.cpp +++ b/esphome/components/htu21d/htu21d.cpp @@ -9,8 +9,8 @@ static const char *const TAG = "htu21d"; static const uint8_t HTU21D_ADDRESS = 0x40; static const uint8_t HTU21D_REGISTER_RESET = 0xFE; -static const uint8_t HTU21D_REGISTER_TEMPERATURE = 0xE3; -static const uint8_t HTU21D_REGISTER_HUMIDITY = 0xE5; +static const uint8_t HTU21D_REGISTER_TEMPERATURE = 0xF3; +static const uint8_t HTU21D_REGISTER_HUMIDITY = 0xF5; static const uint8_t HTU21D_WRITERHT_REG_CMD = 0xE6; /**< Write RH/T User Register 1 */ static const uint8_t HTU21D_REGISTER_STATUS = 0xE7; static const uint8_t HTU21D_WRITEHEATER_REG_CMD = 0x51; /**< Write Heater Control Register */ From 913095f6bebe031bb354ba295aae0d353cd401f2 Mon Sep 17 00:00:00 2001 From: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com> Date: Thu, 16 Oct 2025 16:55:30 -0400 Subject: [PATCH 161/336] [esp32] Reduce tx power on Arduino (#11304) --- esphome/components/esp32/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/esphome/components/esp32/__init__.py b/esphome/components/esp32/__init__.py index 724cb2797d..76d0be345f 100644 --- a/esphome/components/esp32/__init__.py +++ b/esphome/components/esp32/__init__.py @@ -790,6 +790,7 @@ async def to_code(config): add_idf_sdkconfig_option("CONFIG_AUTOSTART_ARDUINO", True) add_idf_sdkconfig_option("CONFIG_MBEDTLS_PSK_MODES", True) 
add_idf_sdkconfig_option("CONFIG_MBEDTLS_CERTIFICATE_BUNDLE", True) + add_idf_sdkconfig_option("CONFIG_ESP_PHY_REDUCE_TX_POWER", True) cg.add_build_flag("-Wno-nonnull-compare") From ea609dc0f60647d7ba13c4cdda2f11dc41585148 Mon Sep 17 00:00:00 2001 From: Daniel Stiner Date: Fri, 17 Oct 2025 09:02:28 +0200 Subject: [PATCH 162/336] [const] Add CONF_OPENTHREAD (#11318) --- esphome/const.py | 1 + 1 file changed, 1 insertion(+) diff --git a/esphome/const.py b/esphome/const.py index 54edcad8c9..c1abb8f530 100644 --- a/esphome/const.py +++ b/esphome/const.py @@ -696,6 +696,7 @@ CONF_OPEN_DRAIN = "open_drain" CONF_OPEN_DRAIN_INTERRUPT = "open_drain_interrupt" CONF_OPEN_DURATION = "open_duration" CONF_OPEN_ENDSTOP = "open_endstop" +CONF_OPENTHREAD = "openthread" CONF_OPERATION = "operation" CONF_OPTIMISTIC = "optimistic" CONF_OPTION = "option" From 8c1bd2fd85b445905a5d99e7246c9b8bdd6fb618 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 17 Oct 2025 08:20:55 -1000 Subject: [PATCH 163/336] [dashboard] Fix binary download with packages using secrets after Path migration (#11313) --- esphome/dashboard/settings.py | 11 ++++- tests/dashboard/test_settings.py | 62 ++++++++++++++++++++++++++ tests/dashboard/test_web_server.py | 71 ++++++++++++++++++++++++++++++ 3 files changed, 143 insertions(+), 1 deletion(-) diff --git a/esphome/dashboard/settings.py b/esphome/dashboard/settings.py index 35b67c0d23..6035b4a1d6 100644 --- a/esphome/dashboard/settings.py +++ b/esphome/dashboard/settings.py @@ -10,6 +10,10 @@ from esphome.helpers import get_bool_env from .util.password import password_hash +# Sentinel file name used for CORE.config_path when dashboard initializes. +# This ensures .parent returns the config directory instead of root. +_DASHBOARD_SENTINEL_FILE = "___DASHBOARD_SENTINEL___.yaml" + class DashboardSettings: """Settings for the dashboard.""" @@ -48,7 +52,12 @@ class DashboardSettings: self.config_dir = Path(args.configuration) self.absolute_config_dir = self.config_dir.resolve() self.verbose = args.verbose - CORE.config_path = self.config_dir / "." + # Set to a sentinel file so .parent gives us the config directory. + # Previously this was `os.path.join(self.config_dir, ".")` which worked because + # os.path.dirname("/config/.") returns "/config", but Path("/config/.").parent + # normalizes to Path("/config") first, then .parent returns Path("/"), breaking + # secret resolution. Using a sentinel file ensures .parent gives the correct directory. + CORE.config_path = self.config_dir / _DASHBOARD_SENTINEL_FILE @property def relative_url(self) -> str: diff --git a/tests/dashboard/test_settings.py b/tests/dashboard/test_settings.py index c9097fe5e2..91a8ec70c3 100644 --- a/tests/dashboard/test_settings.py +++ b/tests/dashboard/test_settings.py @@ -2,11 +2,13 @@ from __future__ import annotations +from argparse import Namespace from pathlib import Path import tempfile import pytest +from esphome.core import CORE from esphome.dashboard.settings import DashboardSettings @@ -159,3 +161,63 @@ def test_rel_path_with_numeric_args(dashboard_settings: DashboardSettings) -> No result = dashboard_settings.rel_path("123", "456.789") expected = dashboard_settings.config_dir / "123" / "456.789" assert result == expected + + +def test_config_path_parent_resolves_to_config_dir(tmp_path: Path) -> None: + """Test that CORE.config_path.parent resolves to config_dir after parse_args. 
+ + This is a regression test for issue #11280 where binary download failed + when using packages with secrets after the Path migration in 2025.10.0. + + The issue was that after switching from os.path to Path: + - Before: os.path.dirname("/config/.") → "/config" + - After: Path("/config/.").parent → Path("/") (normalized first!) + + The fix uses a sentinel file so .parent returns the correct directory: + - Fixed: Path("/config/___DASHBOARD_SENTINEL___.yaml").parent → Path("/config") + """ + # Create test directory structure with secrets and packages + config_dir = tmp_path / "config" + config_dir.mkdir() + + # Create secrets.yaml with obviously fake test values + secrets_file = config_dir / "secrets.yaml" + secrets_file.write_text( + "wifi_ssid: TEST-DUMMY-SSID\n" + "wifi_password: not-a-real-password-just-for-testing\n" + ) + + # Create package file that uses secrets + package_file = config_dir / "common.yaml" + package_file.write_text( + "wifi:\n ssid: !secret wifi_ssid\n password: !secret wifi_password\n" + ) + + # Create main device config that includes the package + device_config = config_dir / "test-device.yaml" + device_config.write_text( + "esphome:\n name: test-device\n\npackages:\n common: !include common.yaml\n" + ) + + # Set up dashboard settings with our test config directory + settings = DashboardSettings() + args = Namespace( + configuration=str(config_dir), + password=None, + username=None, + ha_addon=False, + verbose=False, + ) + settings.parse_args(args) + + # Verify that CORE.config_path.parent correctly points to the config directory + # This is critical for secret resolution in yaml_util.py which does: + # main_config_dir = CORE.config_path.parent + # main_secret_yml = main_config_dir / "secrets.yaml" + assert CORE.config_path.parent == config_dir.resolve() + assert (CORE.config_path.parent / "secrets.yaml").exists() + assert (CORE.config_path.parent / "common.yaml").exists() + + # Verify that CORE.config_path itself uses the sentinel file + assert CORE.config_path.name == "___DASHBOARD_SENTINEL___.yaml" + assert not CORE.config_path.exists() # Sentinel file doesn't actually exist diff --git a/tests/dashboard/test_web_server.py b/tests/dashboard/test_web_server.py index 5bbe7e78fc..6c424e56d4 100644 --- a/tests/dashboard/test_web_server.py +++ b/tests/dashboard/test_web_server.py @@ -1,5 +1,6 @@ from __future__ import annotations +from argparse import Namespace import asyncio from collections.abc import Generator from contextlib import asynccontextmanager @@ -17,6 +18,8 @@ from tornado.ioloop import IOLoop from tornado.testing import bind_unused_port from tornado.websocket import WebSocketClientConnection, websocket_connect +from esphome import yaml_util +from esphome.core import CORE from esphome.dashboard import web_server from esphome.dashboard.const import DashboardEvent from esphome.dashboard.core import DASHBOARD @@ -1302,3 +1305,71 @@ async def test_dashboard_subscriber_refresh_event( # Give it a moment to clean up await asyncio.sleep(0.01) + + +@pytest.mark.asyncio +async def test_dashboard_yaml_loading_with_packages_and_secrets( + tmp_path: Path, +) -> None: + """Test dashboard YAML loading with packages referencing secrets. + + This is a regression test for issue #11280 where binary download failed + when using packages with secrets after the Path migration in 2025.10.0. + + This test verifies that CORE.config_path initialization in the dashboard + allows yaml_util.load_yaml() to correctly resolve secrets from packages. 
+ """ + # Create test directory structure with secrets and packages + config_dir = tmp_path / "config" + config_dir.mkdir() + + # Create secrets.yaml with obviously fake test values + secrets_file = config_dir / "secrets.yaml" + secrets_file.write_text( + "wifi_ssid: TEST-DUMMY-SSID\n" + "wifi_password: not-a-real-password-just-for-testing\n" + ) + + # Create package file that uses secrets + package_file = config_dir / "common.yaml" + package_file.write_text( + "wifi:\n ssid: !secret wifi_ssid\n password: !secret wifi_password\n" + ) + + # Create main device config that includes the package + device_config = config_dir / "test-download-secrets.yaml" + device_config.write_text( + "esphome:\n name: test-download-secrets\n platform: ESP32\n board: esp32dev\n\n" + "packages:\n common: !include common.yaml\n" + ) + + # Initialize DASHBOARD settings with our test config directory + # This is what sets CORE.config_path - the critical code path for the bug + args = Namespace( + configuration=str(config_dir), + password=None, + username=None, + ha_addon=False, + verbose=False, + ) + DASHBOARD.settings.parse_args(args) + + # With the fix: CORE.config_path should be config_dir / "___DASHBOARD_SENTINEL___.yaml" + # so CORE.config_path.parent would be config_dir + # Without the fix: CORE.config_path is config_dir / "." which normalizes to config_dir + # so CORE.config_path.parent would be tmp_path (the parent of config_dir) + + # The fix ensures CORE.config_path.parent points to config_dir + assert CORE.config_path.parent == config_dir.resolve(), ( + f"CORE.config_path.parent should point to config_dir. " + f"Got {CORE.config_path.parent}, expected {config_dir.resolve()}. " + f"CORE.config_path is {CORE.config_path}" + ) + + # Now load the YAML with packages that reference secrets + # This is where the bug would manifest - yaml_util.load_yaml would fail + # to find secrets.yaml because CORE.config_path.parent pointed to the wrong place + config = yaml_util.load_yaml(device_config) + # If we get here, secret resolution worked! + assert "esphome" in config + assert config["esphome"]["name"] == "test-download-secrets" From 1483cee0fb3c558dcd6b5b0caee124c983cdf487 Mon Sep 17 00:00:00 2001 From: tomaszduda23 Date: Sat, 18 Oct 2025 19:32:12 +0200 Subject: [PATCH 164/336] [dashboard] fix migration to Path (#11342) Co-authored-by: J. Nick Koston --- esphome/dashboard/web_server.py | 3 +- tests/dashboard/test_web_server.py | 194 +++++++++++++++++++++++++++++ 2 files changed, 196 insertions(+), 1 deletion(-) diff --git a/esphome/dashboard/web_server.py b/esphome/dashboard/web_server.py index a79c67c3d2..804a2b99af 100644 --- a/esphome/dashboard/web_server.py +++ b/esphome/dashboard/web_server.py @@ -1058,7 +1058,8 @@ class DownloadBinaryRequestHandler(BaseHandler): "download", f"{storage_json.name}-{file_name}", ) - path = storage_json.firmware_bin_path.with_name(file_name) + + path = storage_json.firmware_bin_path.parent.joinpath(file_name) if not path.is_file(): args = ["esphome", "idedata", settings.rel_path(configuration)] diff --git a/tests/dashboard/test_web_server.py b/tests/dashboard/test_web_server.py index 6c424e56d4..385841b1c8 100644 --- a/tests/dashboard/test_web_server.py +++ b/tests/dashboard/test_web_server.py @@ -35,6 +35,26 @@ from esphome.zeroconf import DiscoveredImport from .common import get_fixture_path +def get_build_path(base_path: Path, device_name: str) -> Path: + """Get the build directory path for a device. 
+ + This is a test helper that constructs the standard ESPHome build directory + structure. Note: This helper does NOT perform path traversal sanitization + because it's only used in tests where we control the inputs. The actual + web_server.py code handles sanitization in DownloadBinaryRequestHandler.get() + via file_name.replace("..", "").lstrip("/"). + + Args: + base_path: The base temporary path (typically tmp_path from pytest) + device_name: The name of the device (should not contain path separators + in production use, but tests may use it for specific scenarios) + + Returns: + Path to the build directory (.esphome/build/device_name) + """ + return base_path / ".esphome" / "build" / device_name + + class DashboardTestHelper: def __init__(self, io_loop: IOLoop, client: AsyncHTTPClient, port: int) -> None: self.io_loop = io_loop @@ -417,6 +437,180 @@ async def test_download_binary_handler_idedata_fallback( assert response.body == b"bootloader content" +@pytest.mark.asyncio +@pytest.mark.usefixtures("mock_ext_storage_path") +async def test_download_binary_handler_subdirectory_file( + dashboard: DashboardTestHelper, + tmp_path: Path, + mock_storage_json: MagicMock, +) -> None: + """Test the DownloadBinaryRequestHandler.get with file in subdirectory (nRF52 case). + + This is a regression test for issue #11343 where the Path migration broke + downloads for nRF52 firmware files in subdirectories like 'zephyr/zephyr.uf2'. + + The issue was that with_name() doesn't accept path separators: + - Before: path = storage_json.firmware_bin_path.with_name(file_name) + ValueError: Invalid name 'zephyr/zephyr.uf2' + - After: path = storage_json.firmware_bin_path.parent.joinpath(file_name) + Works correctly with subdirectory paths + """ + # Create a fake nRF52 build structure with firmware in subdirectory + build_dir = get_build_path(tmp_path, "nrf52-device") + zephyr_dir = build_dir / "zephyr" + zephyr_dir.mkdir(parents=True) + + # Create the main firmware binary (would be in build root) + firmware_file = build_dir / "firmware.bin" + firmware_file.write_bytes(b"main firmware") + + # Create the UF2 file in zephyr subdirectory (nRF52 specific) + uf2_file = zephyr_dir / "zephyr.uf2" + uf2_file.write_bytes(b"nRF52 UF2 firmware content") + + # Mock storage JSON + mock_storage = Mock() + mock_storage.name = "nrf52-device" + mock_storage.firmware_bin_path = firmware_file + mock_storage_json.load.return_value = mock_storage + + # Request the UF2 file with subdirectory path + response = await dashboard.fetch( + "/download.bin?configuration=nrf52-device.yaml&file=zephyr/zephyr.uf2", + method="GET", + ) + assert response.code == 200 + assert response.body == b"nRF52 UF2 firmware content" + assert response.headers["Content-Type"] == "application/octet-stream" + assert "attachment" in response.headers["Content-Disposition"] + # Download name should be device-name + full file path + assert "nrf52-device-zephyr/zephyr.uf2" in response.headers["Content-Disposition"] + + +@pytest.mark.asyncio +@pytest.mark.usefixtures("mock_ext_storage_path") +async def test_download_binary_handler_subdirectory_file_url_encoded( + dashboard: DashboardTestHelper, + tmp_path: Path, + mock_storage_json: MagicMock, +) -> None: + """Test the DownloadBinaryRequestHandler.get with URL-encoded subdirectory path. + + Verifies that URL-encoded paths (e.g., zephyr%2Fzephyr.uf2) are correctly + decoded and handled, and that custom download names work with subdirectories. 
+ """ + # Create a fake build structure with firmware in subdirectory + build_dir = get_build_path(tmp_path, "test") + zephyr_dir = build_dir / "zephyr" + zephyr_dir.mkdir(parents=True) + + firmware_file = build_dir / "firmware.bin" + firmware_file.write_bytes(b"content") + + uf2_file = zephyr_dir / "zephyr.uf2" + uf2_file.write_bytes(b"content") + + # Mock storage JSON + mock_storage = Mock() + mock_storage.name = "test_device" + mock_storage.firmware_bin_path = firmware_file + mock_storage_json.load.return_value = mock_storage + + # Request with URL-encoded path and custom download name + response = await dashboard.fetch( + "/download.bin?configuration=test.yaml&file=zephyr%2Fzephyr.uf2&download=custom_name.bin", + method="GET", + ) + assert response.code == 200 + assert "custom_name.bin" in response.headers["Content-Disposition"] + + +@pytest.mark.asyncio +@pytest.mark.usefixtures("mock_ext_storage_path") +@pytest.mark.parametrize( + "attack_path", + [ + pytest.param("../../../secrets.yaml", id="basic_traversal"), + pytest.param("..%2F..%2F..%2Fsecrets.yaml", id="url_encoded"), + pytest.param("zephyr/../../../secrets.yaml", id="traversal_with_prefix"), + pytest.param("/etc/passwd", id="absolute_path"), + pytest.param("//etc/passwd", id="double_slash_absolute"), + pytest.param("....//secrets.yaml", id="multiple_dots"), + ], +) +async def test_download_binary_handler_path_traversal_protection( + dashboard: DashboardTestHelper, + tmp_path: Path, + mock_storage_json: MagicMock, + attack_path: str, +) -> None: + """Test that DownloadBinaryRequestHandler prevents path traversal attacks. + + Verifies that attempts to use '..' in file paths are sanitized to prevent + accessing files outside the build directory. Tests multiple attack vectors. + """ + # Create build structure + build_dir = get_build_path(tmp_path, "test") + build_dir.mkdir(parents=True) + firmware_file = build_dir / "firmware.bin" + firmware_file.write_bytes(b"firmware content") + + # Create a sensitive file outside the build directory that should NOT be accessible + sensitive_file = tmp_path / "secrets.yaml" + sensitive_file.write_bytes(b"secret: my_secret_password") + + # Mock storage JSON + mock_storage = Mock() + mock_storage.name = "test_device" + mock_storage.firmware_bin_path = firmware_file + mock_storage_json.load.return_value = mock_storage + + # Attempt path traversal attack - should be blocked + with pytest.raises(HTTPClientError) as exc_info: + await dashboard.fetch( + f"/download.bin?configuration=test.yaml&file={attack_path}", + method="GET", + ) + # Should get 404 (file not found after sanitization) or 500 (idedata fails) + assert exc_info.value.code in (404, 500) + + +@pytest.mark.asyncio +@pytest.mark.usefixtures("mock_ext_storage_path") +async def test_download_binary_handler_multiple_subdirectory_levels( + dashboard: DashboardTestHelper, + tmp_path: Path, + mock_storage_json: MagicMock, +) -> None: + """Test downloading files from multiple subdirectory levels. + + Verifies that joinpath correctly handles multi-level paths like 'build/output/firmware.bin'. 
+ """ + # Create nested directory structure + build_dir = get_build_path(tmp_path, "test") + nested_dir = build_dir / "build" / "output" + nested_dir.mkdir(parents=True) + + firmware_file = build_dir / "firmware.bin" + firmware_file.write_bytes(b"main") + + nested_file = nested_dir / "firmware.bin" + nested_file.write_bytes(b"nested firmware content") + + # Mock storage JSON + mock_storage = Mock() + mock_storage.name = "test_device" + mock_storage.firmware_bin_path = firmware_file + mock_storage_json.load.return_value = mock_storage + + response = await dashboard.fetch( + "/download.bin?configuration=test.yaml&file=build/output/firmware.bin", + method="GET", + ) + assert response.code == 200 + assert response.body == b"nested firmware content" + + @pytest.mark.asyncio async def test_edit_request_handler_post_invalid_file( dashboard: DashboardTestHelper, From 0e34d1b64d9341c1146f862d5631061283894d77 Mon Sep 17 00:00:00 2001 From: Spectre5 <31610422+Spectre5@users.noreply.github.com> Date: Sat, 18 Oct 2025 18:13:57 -0700 Subject: [PATCH 165/336] Change all temperature offsets to temperature_delta (#11347) --- esphome/components/bme680_bsec/__init__.py | 2 +- esphome/components/bme68x_bsec2/__init__.py | 2 +- esphome/components/scd4x/sensor.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/esphome/components/bme680_bsec/__init__.py b/esphome/components/bme680_bsec/__init__.py index 330dc4dd9c..8a8d74b5f3 100644 --- a/esphome/components/bme680_bsec/__init__.py +++ b/esphome/components/bme680_bsec/__init__.py @@ -41,7 +41,7 @@ CONFIG_SCHEMA = cv.All( cv.Schema( { cv.GenerateID(): cv.declare_id(BME680BSECComponent), - cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature, + cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta, cv.Optional(CONF_IAQ_MODE, default="STATIC"): cv.enum( IAQ_MODE_OPTIONS, upper=True ), diff --git a/esphome/components/bme68x_bsec2/__init__.py b/esphome/components/bme68x_bsec2/__init__.py index f4235b31b4..e421efb2d6 100644 --- a/esphome/components/bme68x_bsec2/__init__.py +++ b/esphome/components/bme68x_bsec2/__init__.py @@ -139,7 +139,7 @@ CONFIG_SCHEMA_BASE = ( cv.Optional(CONF_SUPPLY_VOLTAGE, default="3.3V"): cv.enum( VOLTAGE_OPTIONS, upper=True ), - cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature, + cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta, cv.Optional( CONF_STATE_SAVE_INTERVAL, default="6hours" ): cv.positive_time_period_minutes, diff --git a/esphome/components/scd4x/sensor.py b/esphome/components/scd4x/sensor.py index 6b2188cd5a..ec90234ac3 100644 --- a/esphome/components/scd4x/sensor.py +++ b/esphome/components/scd4x/sensor.py @@ -81,7 +81,7 @@ CONFIG_SCHEMA = ( cv.int_range(min=0, max=0xFFFF, max_included=False), ), cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION): cv.pressure, - cv.Optional(CONF_TEMPERATURE_OFFSET, default="4°C"): cv.temperature, + cv.Optional(CONF_TEMPERATURE_OFFSET, default="4°C"): cv.temperature_delta, cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION_SOURCE): cv.use_id( sensor.Sensor ), From 6aff1394ad55340b11dabac20b479ff1879446e3 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 19 Oct 2025 09:15:47 -1000 Subject: [PATCH 166/336] [core] Fix IndexError when OTA devices cannot be resolved (#11311) --- esphome/__main__.py | 4 +- tests/unit_tests/test_main.py | 78 +++++++++++++++++++---------------- 2 files changed, 45 insertions(+), 37 deletions(-) diff --git a/esphome/__main__.py b/esphome/__main__.py index d9bdfb175b..d7f11feef9 100644 --- a/esphome/__main__.py +++ b/esphome/__main__.py @@ -185,7 +185,9 @@ def choose_upload_log_host( else: resolved.append(device) if not resolved: - _LOGGER.error("All specified devices: %s could not be resolved.", defaults) + raise EsphomeError( + f"All specified devices {defaults} could not be resolved. Is the device connected to the network?" + ) return resolved # No devices specified, show interactive chooser diff --git a/tests/unit_tests/test_main.py b/tests/unit_tests/test_main.py index 59d0433aa4..73dfe359f0 100644 --- a/tests/unit_tests/test_main.py +++ b/tests/unit_tests/test_main.py @@ -321,12 +321,14 @@ def test_choose_upload_log_host_with_serial_device_no_ports( ) -> None: """Test SERIAL device when no serial ports are found.""" setup_core() - result = choose_upload_log_host( - default="SERIAL", - check_default=None, - purpose=Purpose.UPLOADING, - ) - assert result == [] + with pytest.raises( + EsphomeError, match="All specified devices .* could not be resolved" + ): + choose_upload_log_host( + default="SERIAL", + check_default=None, + purpose=Purpose.UPLOADING, + ) assert "No serial ports found, skipping SERIAL device" in caplog.text @@ -367,12 +369,14 @@ def test_choose_upload_log_host_with_ota_device_with_api_config() -> None: """Test OTA device when API is configured (no upload without OTA in config).""" setup_core(config={CONF_API: {}}, address="192.168.1.100") - result = choose_upload_log_host( - default="OTA", - check_default=None, - purpose=Purpose.UPLOADING, - ) - assert result == [] + with pytest.raises( + EsphomeError, match="All specified devices .* could not be resolved" + ): + choose_upload_log_host( + default="OTA", + check_default=None, + purpose=Purpose.UPLOADING, + ) def test_choose_upload_log_host_with_ota_device_with_api_config_logging() -> None: @@ -405,12 +409,14 @@ def test_choose_upload_log_host_with_ota_device_no_fallback() -> None: """Test OTA device with no valid fallback options.""" setup_core() - result = choose_upload_log_host( - default="OTA", - check_default=None, - purpose=Purpose.UPLOADING, - ) - assert result == [] + with pytest.raises( + EsphomeError, match="All specified devices .* could not be resolved" + ): + choose_upload_log_host( + default="OTA", + check_default=None, + purpose=Purpose.UPLOADING, + ) @pytest.mark.usefixtures("mock_choose_prompt") @@ -615,21 +621,19 @@ def test_choose_upload_log_host_empty_defaults_list() -> None: @pytest.mark.usefixtures("mock_no_serial_ports", "mock_no_mqtt_logging") -def test_choose_upload_log_host_all_devices_unresolved( - caplog: pytest.LogCaptureFixture, -) -> None: +def test_choose_upload_log_host_all_devices_unresolved() -> None: """Test when all specified devices cannot be resolved.""" setup_core() - result = choose_upload_log_host( - default=["SERIAL", "OTA"], - check_default=None, - purpose=Purpose.UPLOADING, - ) - assert result == [] - assert ( - "All specified devices: ['SERIAL', 'OTA'] could not be resolved." 
in caplog.text - ) + with pytest.raises( + EsphomeError, + match=r"All specified devices \['SERIAL', 'OTA'\] could not be resolved", + ): + choose_upload_log_host( + default=["SERIAL", "OTA"], + check_default=None, + purpose=Purpose.UPLOADING, + ) @pytest.mark.usefixtures("mock_no_serial_ports", "mock_no_mqtt_logging") @@ -762,12 +766,14 @@ def test_choose_upload_log_host_no_address_with_ota_config() -> None: """Test OTA device when OTA is configured but no address is set.""" setup_core(config={CONF_OTA: {}}) - result = choose_upload_log_host( - default="OTA", - check_default=None, - purpose=Purpose.UPLOADING, - ) - assert result == [] + with pytest.raises( + EsphomeError, match="All specified devices .* could not be resolved" + ): + choose_upload_log_host( + default="OTA", + check_default=None, + purpose=Purpose.UPLOADING, + ) @dataclass From ea38237f29330f12e9718088b665b20fce008147 Mon Sep 17 00:00:00 2001 From: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com> Date: Sun, 19 Oct 2025 15:49:05 -0400 Subject: [PATCH 167/336] [esp32] Fix OTA rollback (#11300) Co-authored-by: J. Nick Koston --- esphome/components/esp32/core.cpp | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/esphome/components/esp32/core.cpp b/esphome/components/esp32/core.cpp index f3bdfea2a0..3427c96e70 100644 --- a/esphome/components/esp32/core.cpp +++ b/esphome/components/esp32/core.cpp @@ -6,6 +6,7 @@ #include #include #include +#include #include #include #include @@ -52,6 +53,16 @@ void arch_init() { disableCore1WDT(); #endif #endif + + // If the bootloader was compiled with CONFIG_BOOTLOADER_APP_ROLLBACK_ENABLE the current + // partition will get rolled back unless it is marked as valid. + esp_ota_img_states_t state; + const esp_partition_t *running = esp_ota_get_running_partition(); + if (esp_ota_get_state_partition(running, &state) == ESP_OK) { + if (state == ESP_OTA_IMG_PENDING_VERIFY) { + esp_ota_mark_app_valid_cancel_rollback(); + } + } } void IRAM_ATTR HOT arch_feed_wdt() { esp_task_wdt_reset(); } From a186c1062f28e15315187f9716a89498d380dd52 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Mon, 20 Oct 2025 10:06:43 +1300 Subject: [PATCH 168/336] Bump version to 2025.10.2 --- Doxyfile | 2 +- esphome/const.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Doxyfile b/Doxyfile index 78f004c7e3..6ed69336d2 100644 --- a/Doxyfile +++ b/Doxyfile @@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 2025.10.1 +PROJECT_NUMBER = 2025.10.2 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/esphome/const.py b/esphome/const.py index c1abb8f530..db8903fd96 100644 --- a/esphome/const.py +++ b/esphome/const.py @@ -4,7 +4,7 @@ from enum import Enum from esphome.enum import StrEnum -__version__ = "2025.10.1" +__version__ = "2025.10.2" ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_" VALID_SUBSTITUTIONS_CHARACTERS = ( From 87ca8784ef969378ff56d78176add92be31471f1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 19 Oct 2025 11:12:56 -1000 Subject: [PATCH 169/336] [openthread] Backport address resolution support to prevent OTA crash (#11312) Co-authored-by: Daniel Stiner --- esphome/core/__init__.py | 4 ++++ tests/unit_tests/test_core.py | 7 +++++++ 2 files changed, 11 insertions(+) diff --git a/esphome/core/__init__.py b/esphome/core/__init__.py index 43febd28a2..2d49d29c5e 100644 --- a/esphome/core/__init__.py +++ b/esphome/core/__init__.py @@ -11,6 +11,7 @@ from esphome.const import ( CONF_COMMENT, CONF_ESPHOME, CONF_ETHERNET, + CONF_OPENTHREAD, CONF_PORT, CONF_USE_ADDRESS, CONF_WEB_SERVER, @@ -641,6 +642,9 @@ class EsphomeCore: if CONF_ETHERNET in self.config: return self.config[CONF_ETHERNET][CONF_USE_ADDRESS] + if CONF_OPENTHREAD in self.config: + return f"{self.name}.local" + return None @property diff --git a/tests/unit_tests/test_core.py b/tests/unit_tests/test_core.py index 48eae06ea6..41114ae18b 100644 --- a/tests/unit_tests/test_core.py +++ b/tests/unit_tests/test_core.py @@ -570,6 +570,13 @@ class TestEsphomeCore: assert target.address == "4.3.2.1" + def test_address__openthread(self, target): + target.name = "test-device" + target.config = {} + target.config[const.CONF_OPENTHREAD] = {} + + assert target.address == "test-device.local" + def test_is_esp32(self, target): target.data[const.KEY_CORE] = {const.KEY_TARGET_PLATFORM: "esp32"} From 1a2057df3011a6e5de32ffb36b7efc62f451a9cd Mon Sep 17 00:00:00 2001 From: Juan Antonio Aldea Date: Sun, 19 Oct 2025 23:15:17 +0200 Subject: [PATCH 170/336] Migrate from hexencode() to format_hex_pretty() in Kuntze component (#11372) --- esphome/components/kuntze/kuntze.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/kuntze/kuntze.cpp b/esphome/components/kuntze/kuntze.cpp index 42545d9d54..30f98aaa99 100644 --- a/esphome/components/kuntze/kuntze.cpp +++ b/esphome/components/kuntze/kuntze.cpp @@ -14,7 +14,7 @@ void Kuntze::on_modbus_data(const std::vector &data) { auto get_16bit = [&](int i) -> uint16_t { return (uint16_t(data[i * 2]) << 8) | uint16_t(data[i * 2 + 1]); }; this->waiting_ = false; - ESP_LOGV(TAG, "Data: %s", hexencode(data).c_str()); + ESP_LOGV(TAG, "Data: %s", format_hex_pretty(data).c_str()); float value = (float) get_16bit(0); for (int i = 0; i < data[3]; i++) From 1e1fefbd0a865fe2858441ab9d35d73b71347a89 Mon Sep 17 00:00:00 2001 From: Javier Peletier Date: Sun, 19 Oct 2025 23:31:25 +0200 Subject: [PATCH 171/336] [substitutions] !extend and !remove now support substitutions and jinja (#11203) --- esphome/config.py | 109 ++++++++++++++---- esphome/config_helpers.py | 86 ++++---------- esphome/config_validation.py | 7 -- .../component_tests/packages/test_packages.py | 71 +++++------- .../05-extend-remove.approved.yaml | 9 ++ .../substitutions/05-extend-remove.input.yaml | 22 ++++ tests/unit_tests/test_substitutions.py | 3 + 7 files changed, 171 insertions(+), 136 deletions(-) create mode 100644 tests/unit_tests/fixtures/substitutions/05-extend-remove.approved.yaml create mode 100644 tests/unit_tests/fixtures/substitutions/05-extend-remove.input.yaml diff --git a/esphome/config.py b/esphome/config.py index 6adecb5c65..634dba8dad 100644 --- a/esphome/config.py +++ b/esphome/config.py @@ -12,7 +12,7 @@ from typing import Any import voluptuous as vol from esphome import core, loader, pins, yaml_util -from esphome.config_helpers import Extend, Remove, merge_dicts_ordered +from esphome.config_helpers import Extend, Remove, merge_config, merge_dicts_ordered import 
esphome.config_validation as cv from esphome.const import ( CONF_ESPHOME, @@ -324,13 +324,7 @@ def iter_ids(config, path=None): yield from iter_ids(value, path + [key]) -def recursive_check_replaceme(value): - if isinstance(value, list): - return cv.Schema([recursive_check_replaceme])(value) - if isinstance(value, dict): - return cv.Schema({cv.valid: recursive_check_replaceme})(value) - if isinstance(value, ESPLiteralValue): - pass +def check_replaceme(value): if isinstance(value, str) and value == "REPLACEME": raise cv.Invalid( "Found 'REPLACEME' in configuration, this is most likely an error. " @@ -339,7 +333,86 @@ def recursive_check_replaceme(value): "If you want to use the literal REPLACEME string, " 'please use "!literal REPLACEME"' ) - return value + + +def _build_list_index(lst): + index = OrderedDict() + extensions, removals = [], set() + for item in lst: + if item is None: + removals.add(None) + continue + item_id = None + if isinstance(item, dict) and (item_id := item.get(CONF_ID)): + if isinstance(item_id, Extend): + extensions.append(item) + continue + if isinstance(item_id, Remove): + removals.add(item_id.value) + continue + if not item_id or item_id in index: + # no id or duplicate -> pass through with identity-based key + item_id = id(item) + index[item_id] = item + return index, extensions, removals + + +def resolve_extend_remove(value, is_key=None): + if isinstance(value, ESPLiteralValue): + return # do not check inside literal blocks + if isinstance(value, list): + index, extensions, removals = _build_list_index(value) + if extensions or removals: + # Rebuild the original list after + # processing all extensions and removals + for item in extensions: + item_id = item[CONF_ID].value + if item_id in removals: + continue + old = index.get(item_id) + if old is None: + # Failed to find source for extension + # Find index of item to show error at correct position + i = next( + ( + i + for i, d in enumerate(value) + if d.get(CONF_ID) == item[CONF_ID] + ) + ) + with cv.prepend_path(i): + raise cv.Invalid( + f"Source for extension of ID '{item_id}' was not found." + ) + item[CONF_ID] = item_id + index[item_id] = merge_config(old, item) + for item_id in removals: + index.pop(item_id, None) + + value[:] = index.values() + + for i, item in enumerate(value): + with cv.prepend_path(i): + resolve_extend_remove(item, False) + return + if isinstance(value, dict): + removals = [] + for k, v in value.items(): + with cv.prepend_path(k): + if isinstance(v, Remove): + removals.append(k) + continue + resolve_extend_remove(k, True) + resolve_extend_remove(v, False) + for k in removals: + value.pop(k, None) + return + if is_key: + return # do not check keys (yet) + + check_replaceme(value) + + return class ConfigValidationStep(abc.ABC): @@ -437,19 +510,6 @@ class LoadValidationStep(ConfigValidationStep): continue p_name = p_config.get("platform") if p_name is None: - p_id = p_config.get(CONF_ID) - if isinstance(p_id, Extend): - result.add_str_error( - f"Source for extension of ID '{p_id.value}' was not found.", - path + [CONF_ID], - ) - continue - if isinstance(p_id, Remove): - result.add_str_error( - f"Source for removal of ID '{p_id.value}' was not found.", - path + [CONF_ID], - ) - continue result.add_str_error( f"'{self.domain}' requires a 'platform' key but it was not specified.", path, @@ -934,9 +994,10 @@ def validate_config( CORE.raw_config = config - # 1.1. Check for REPLACEME special value + # 1.1. 
Resolve !extend and !remove and check for REPLACEME + # After this step, there will not be any Extend or Remove values in the config anymore try: - recursive_check_replaceme(config) + resolve_extend_remove(config) except vol.Invalid as err: result.add_error(err) diff --git a/esphome/config_helpers.py b/esphome/config_helpers.py index 88cfa49fdc..c0a3b99968 100644 --- a/esphome/config_helpers.py +++ b/esphome/config_helpers.py @@ -1,7 +1,6 @@ from collections.abc import Callable from esphome.const import ( - CONF_ID, CONF_LEVEL, CONF_LOGGER, KEY_CORE, @@ -75,73 +74,28 @@ class Remove: return isinstance(b, Remove) and self.value == b.value -def merge_config(full_old, full_new): - def merge(old, new): - if isinstance(new, dict): - if not isinstance(old, dict): - return new - # Preserve OrderedDict type by copying to OrderedDict if either input is OrderedDict - if isinstance(old, OrderedDict) or isinstance(new, OrderedDict): - res = OrderedDict(old) - else: - res = old.copy() - for k, v in new.items(): - if isinstance(v, Remove) and k in old: - del res[k] - else: - res[k] = merge(old[k], v) if k in old else v - return res - if isinstance(new, list): - if not isinstance(old, list): - return new - res = old.copy() - ids = { - v_id: i - for i, v in enumerate(res) - if isinstance(v, dict) - and (v_id := v.get(CONF_ID)) - and isinstance(v_id, str) - } - extend_ids = { - v_id.value: i - for i, v in enumerate(res) - if isinstance(v, dict) - and (v_id := v.get(CONF_ID)) - and isinstance(v_id, Extend) - } - - ids_to_delete = [] - for v in new: - if isinstance(v, dict) and (new_id := v.get(CONF_ID)): - if isinstance(new_id, Extend): - new_id = new_id.value - if new_id in ids: - v[CONF_ID] = new_id - res[ids[new_id]] = merge(res[ids[new_id]], v) - continue - elif isinstance(new_id, Remove): - new_id = new_id.value - if new_id in ids: - ids_to_delete.append(ids[new_id]) - continue - elif ( - new_id in extend_ids - ): # When a package is extending a non-packaged item - extend_res = res[extend_ids[new_id]] - extend_res[CONF_ID] = new_id - new_v = merge(v, extend_res) - res[extend_ids[new_id]] = new_v - continue - else: - ids[new_id] = len(res) - res.append(v) - return [v for i, v in enumerate(res) if i not in ids_to_delete] - if new is None: - return old - +def merge_config(old, new): + if isinstance(new, Remove): return new + if isinstance(new, dict): + if not isinstance(old, dict): + return new + # Preserve OrderedDict type by copying to OrderedDict if either input is OrderedDict + if isinstance(old, OrderedDict) or isinstance(new, OrderedDict): + res = OrderedDict(old) + else: + res = old.copy() + for k, v in new.items(): + res[k] = merge_config(old.get(k), v) + return res + if isinstance(new, list): + if not isinstance(old, list): + return new + return old + new + if new is None: + return old - return merge(full_old, full_new) + return new def filter_source_files_from_platform( diff --git a/esphome/config_validation.py b/esphome/config_validation.py index e2f0b835c9..c613a984c4 100644 --- a/esphome/config_validation.py +++ b/esphome/config_validation.py @@ -24,7 +24,6 @@ import voluptuous as vol from esphome import core import esphome.codegen as cg -from esphome.config_helpers import Extend, Remove from esphome.const import ( ALLOWED_NAME_CHARS, CONF_AVAILABILITY, @@ -624,12 +623,6 @@ def declare_id(type): if value is None: return core.ID(None, is_declaration=True, type=type) - if isinstance(value, Extend): - raise Invalid(f"Source for extension of ID '{value.value}' was not found.") - - if 
isinstance(value, Remove): - raise Invalid(f"Source for Removal of ID '{value.value}' was not found.") - return core.ID(validate_id_name(value), is_declaration=True, type=type) return validator diff --git a/tests/component_tests/packages/test_packages.py b/tests/component_tests/packages/test_packages.py index 4712daad0d..d66ca58a69 100644 --- a/tests/component_tests/packages/test_packages.py +++ b/tests/component_tests/packages/test_packages.py @@ -6,6 +6,7 @@ from unittest.mock import MagicMock, patch import pytest from esphome.components.packages import do_packages_pass +from esphome.config import resolve_extend_remove from esphome.config_helpers import Extend, Remove import esphome.config_validation as cv from esphome.const import ( @@ -64,13 +65,20 @@ def fixture_basic_esphome(): return {CONF_NAME: TEST_DEVICE_NAME, CONF_PLATFORM: TEST_PLATFORM} +def packages_pass(config): + """Wrapper around packages_pass that also resolves Extend and Remove.""" + config = do_packages_pass(config) + resolve_extend_remove(config) + return config + + def test_package_unused(basic_esphome, basic_wifi): """ Ensures do_package_pass does not change a config if packages aren't used. """ config = {CONF_ESPHOME: basic_esphome, CONF_WIFI: basic_wifi} - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == config @@ -83,7 +91,7 @@ def test_package_invalid_dict(basic_esphome, basic_wifi): config = {CONF_ESPHOME: basic_esphome, CONF_PACKAGES: basic_wifi | {CONF_URL: ""}} with pytest.raises(cv.Invalid): - do_packages_pass(config) + packages_pass(config) def test_package_include(basic_wifi, basic_esphome): @@ -99,7 +107,7 @@ def test_package_include(basic_wifi, basic_esphome): expected = {CONF_ESPHOME: basic_esphome, CONF_WIFI: basic_wifi} - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -124,7 +132,7 @@ def test_package_append(basic_wifi, basic_esphome): }, } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -148,7 +156,7 @@ def test_package_override(basic_wifi, basic_esphome): }, } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -177,7 +185,7 @@ def test_multiple_package_order(): }, } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -233,7 +241,7 @@ def test_package_list_merge(): ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -311,7 +319,7 @@ def test_package_list_merge_by_id(): ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -350,13 +358,13 @@ def test_package_merge_by_id_with_list(): ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected def test_package_merge_by_missing_id(): """ - Ensures that components with missing IDs are not merged. + Ensures that a validation error is thrown when trying to extend a missing ID. 
""" config = { @@ -379,25 +387,15 @@ def test_package_merge_by_missing_id(): ], } - expected = { - CONF_SENSOR: [ - { - CONF_ID: TEST_SENSOR_ID_1, - CONF_FILTERS: [{CONF_MULTIPLY: 42.0}], - }, - { - CONF_ID: TEST_SENSOR_ID_1, - CONF_FILTERS: [{CONF_MULTIPLY: 10.0}], - }, - { - CONF_ID: Extend(TEST_SENSOR_ID_2), - CONF_FILTERS: [{CONF_OFFSET: 146.0}], - }, - ] - } + error_raised = False + try: + packages_pass(config) + assert False, "Expected validation error for missing ID" + except cv.Invalid as err: + error_raised = True + assert err.path == [CONF_SENSOR, 2] - actual = do_packages_pass(config) - assert actual == expected + assert error_raised def test_package_list_remove_by_id(): @@ -447,7 +445,7 @@ def test_package_list_remove_by_id(): ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -493,7 +491,7 @@ def test_multiple_package_list_remove_by_id(): ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -514,7 +512,7 @@ def test_package_dict_remove_by_id(basic_wifi, basic_esphome): CONF_ESPHOME: basic_esphome, } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -545,7 +543,6 @@ def test_package_remove_by_missing_id(): } expected = { - "missing_key": Remove(), CONF_SENSOR: [ { CONF_ID: TEST_SENSOR_ID_1, @@ -555,14 +552,10 @@ def test_package_remove_by_missing_id(): CONF_ID: TEST_SENSOR_ID_1, CONF_FILTERS: [{CONF_MULTIPLY: 10.0}], }, - { - CONF_ID: Remove(TEST_SENSOR_ID_2), - CONF_FILTERS: [{CONF_OFFSET: 146.0}], - }, ], } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -634,7 +627,7 @@ def test_remote_packages_with_files_list( ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -730,5 +723,5 @@ def test_remote_packages_with_files_and_vars( ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected diff --git a/tests/unit_tests/fixtures/substitutions/05-extend-remove.approved.yaml b/tests/unit_tests/fixtures/substitutions/05-extend-remove.approved.yaml new file mode 100644 index 0000000000..a479370f4b --- /dev/null +++ b/tests/unit_tests/fixtures/substitutions/05-extend-remove.approved.yaml @@ -0,0 +1,9 @@ +substitutions: + A: component1 + B: component2 + C: component3 +some_component: + - id: component1 + value: 2 + - id: component2 + value: 5 diff --git a/tests/unit_tests/fixtures/substitutions/05-extend-remove.input.yaml b/tests/unit_tests/fixtures/substitutions/05-extend-remove.input.yaml new file mode 100644 index 0000000000..2e0e60798d --- /dev/null +++ b/tests/unit_tests/fixtures/substitutions/05-extend-remove.input.yaml @@ -0,0 +1,22 @@ +substitutions: + A: component1 + B: component2 + C: component3 + +packages: + - some_component: + - id: component1 + value: 1 + - id: !extend ${B} + value: 4 + - id: !extend ${B} + value: 5 + - id: component3 + value: 6 + +some_component: + - id: !extend ${A} + value: 2 + - id: component2 + value: 3 + - id: !remove ${C} diff --git a/tests/unit_tests/test_substitutions.py b/tests/unit_tests/test_substitutions.py index 59396a4a83..beb1ebc73e 100644 --- a/tests/unit_tests/test_substitutions.py +++ b/tests/unit_tests/test_substitutions.py @@ -4,6 +4,7 @@ from pathlib import Path from esphome import config as config_module, yaml_util from esphome.components import substitutions +from esphome.config import resolve_extend_remove from esphome.config_helpers import 
merge_config from esphome.const import CONF_PACKAGES, CONF_SUBSTITUTIONS from esphome.core import CORE @@ -81,6 +82,8 @@ def test_substitutions_fixtures(fixture_path): substitutions.do_substitution_pass(config, None) + resolve_extend_remove(config) + # Also load expected using ESPHome's loader, or use {} if missing and DEV_MODE if expected_path.is_file(): expected = yaml_util.load_yaml(expected_path) From afbd3f77af222968161f7bc923b392e29fe00879 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Mon, 20 Oct 2025 11:08:30 +1300 Subject: [PATCH 172/336] [light] Clean up deprecated functions from 1.21 (#11389) --- esphome/components/light/addressable_light.h | 8 +++---- esphome/components/light/light_traits.h | 22 +------------------- 2 files changed, 4 insertions(+), 26 deletions(-) diff --git a/esphome/components/light/addressable_light.h b/esphome/components/light/addressable_light.h index baa4507d2f..3e94a39745 100644 --- a/esphome/components/light/addressable_light.h +++ b/esphome/components/light/addressable_light.h @@ -1,11 +1,11 @@ #pragma once -#include "esphome/core/component.h" -#include "esphome/core/defines.h" -#include "esphome/core/color.h" #include "esp_color_correction.h" #include "esp_color_view.h" #include "esp_range_view.h" +#include "esphome/core/color.h" +#include "esphome/core/component.h" +#include "esphome/core/defines.h" #include "light_output.h" #include "light_state.h" #include "transformers.h" @@ -17,8 +17,6 @@ namespace esphome { namespace light { -using ESPColor ESPDEPRECATED("esphome::light::ESPColor is deprecated, use esphome::Color instead.", "v1.21") = Color; - /// Convert the color information from a `LightColorValues` object to a `Color` object (does not apply brightness). 
Color color_from_light_color_values(LightColorValues val); diff --git a/esphome/components/light/light_traits.h b/esphome/components/light/light_traits.h index c83d8ad2a9..4532edca83 100644 --- a/esphome/components/light/light_traits.h +++ b/esphome/components/light/light_traits.h @@ -1,7 +1,7 @@ #pragma once -#include "esphome/core/helpers.h" #include "color_mode.h" +#include "esphome/core/helpers.h" namespace esphome { @@ -31,26 +31,6 @@ class LightTraits { return this->supported_color_modes_.has_capability(color_capability); } - ESPDEPRECATED("get_supports_brightness() is deprecated, use color modes instead.", "v1.21") - bool get_supports_brightness() const { return this->supports_color_capability(ColorCapability::BRIGHTNESS); } - ESPDEPRECATED("get_supports_rgb() is deprecated, use color modes instead.", "v1.21") - bool get_supports_rgb() const { return this->supports_color_capability(ColorCapability::RGB); } - ESPDEPRECATED("get_supports_rgb_white_value() is deprecated, use color modes instead.", "v1.21") - bool get_supports_rgb_white_value() const { - return this->supports_color_mode(ColorMode::RGB_WHITE) || - this->supports_color_mode(ColorMode::RGB_COLOR_TEMPERATURE); - } - ESPDEPRECATED("get_supports_color_temperature() is deprecated, use color modes instead.", "v1.21") - bool get_supports_color_temperature() const { - return this->supports_color_capability(ColorCapability::COLOR_TEMPERATURE); - } - ESPDEPRECATED("get_supports_color_interlock() is deprecated, use color modes instead.", "v1.21") - bool get_supports_color_interlock() const { - return this->supports_color_mode(ColorMode::RGB) && - (this->supports_color_mode(ColorMode::WHITE) || this->supports_color_mode(ColorMode::COLD_WARM_WHITE) || - this->supports_color_mode(ColorMode::COLOR_TEMPERATURE)); - } - float get_min_mireds() const { return this->min_mireds_; } void set_min_mireds(float min_mireds) { this->min_mireds_ = min_mireds; } float get_max_mireds() const { return this->max_mireds_; } From 9c146a70708899fc01522849bf8e018919c8f841 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Mon, 20 Oct 2025 11:11:35 +1300 Subject: [PATCH 173/336] [climate] Clean up deprecated functions from 1.20 (#11388) --- esphome/components/climate/climate_traits.h | 46 +-------------------- 1 file changed, 2 insertions(+), 44 deletions(-) diff --git a/esphome/components/climate/climate_traits.h b/esphome/components/climate/climate_traits.h index 50c1e79ad2..2962a147d7 100644 --- a/esphome/components/climate/climate_traits.h +++ b/esphome/components/climate/climate_traits.h @@ -1,8 +1,8 @@ #pragma once -#include "esphome/core/helpers.h" -#include "climate_mode.h" #include +#include "climate_mode.h" +#include "esphome/core/helpers.h" namespace esphome { @@ -109,44 +109,12 @@ class ClimateTraits { void set_supported_modes(std::set modes) { this->supported_modes_ = std::move(modes); } void add_supported_mode(ClimateMode mode) { this->supported_modes_.insert(mode); } - ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20") - void set_supports_auto_mode(bool supports_auto_mode) { set_mode_support_(CLIMATE_MODE_AUTO, supports_auto_mode); } - ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20") - void set_supports_cool_mode(bool supports_cool_mode) { set_mode_support_(CLIMATE_MODE_COOL, supports_cool_mode); } - ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20") - void set_supports_heat_mode(bool 
supports_heat_mode) { set_mode_support_(CLIMATE_MODE_HEAT, supports_heat_mode); } - ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20") - void set_supports_heat_cool_mode(bool supported) { set_mode_support_(CLIMATE_MODE_HEAT_COOL, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20") - void set_supports_fan_only_mode(bool supports_fan_only_mode) { - set_mode_support_(CLIMATE_MODE_FAN_ONLY, supports_fan_only_mode); - } - ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20") - void set_supports_dry_mode(bool supports_dry_mode) { set_mode_support_(CLIMATE_MODE_DRY, supports_dry_mode); } bool supports_mode(ClimateMode mode) const { return this->supported_modes_.count(mode); } const std::set &get_supported_modes() const { return this->supported_modes_; } void set_supported_fan_modes(std::set modes) { this->supported_fan_modes_ = std::move(modes); } void add_supported_fan_mode(ClimateFanMode mode) { this->supported_fan_modes_.insert(mode); } void add_supported_custom_fan_mode(const std::string &mode) { this->supported_custom_fan_modes_.insert(mode); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_on(bool supported) { set_fan_mode_support_(CLIMATE_FAN_ON, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_off(bool supported) { set_fan_mode_support_(CLIMATE_FAN_OFF, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_auto(bool supported) { set_fan_mode_support_(CLIMATE_FAN_AUTO, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_low(bool supported) { set_fan_mode_support_(CLIMATE_FAN_LOW, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_medium(bool supported) { set_fan_mode_support_(CLIMATE_FAN_MEDIUM, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_high(bool supported) { set_fan_mode_support_(CLIMATE_FAN_HIGH, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_middle(bool supported) { set_fan_mode_support_(CLIMATE_FAN_MIDDLE, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_focus(bool supported) { set_fan_mode_support_(CLIMATE_FAN_FOCUS, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_diffuse(bool supported) { set_fan_mode_support_(CLIMATE_FAN_DIFFUSE, supported); } bool supports_fan_mode(ClimateFanMode fan_mode) const { return this->supported_fan_modes_.count(fan_mode); } bool get_supports_fan_modes() const { return !this->supported_fan_modes_.empty() || !this->supported_custom_fan_modes_.empty(); @@ -178,16 +146,6 @@ class ClimateTraits { void set_supported_swing_modes(std::set modes) { this->supported_swing_modes_ = std::move(modes); } void add_supported_swing_mode(ClimateSwingMode mode) { this->supported_swing_modes_.insert(mode); } - ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20") - 
void set_supports_swing_mode_off(bool supported) { set_swing_mode_support_(CLIMATE_SWING_OFF, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20") - void set_supports_swing_mode_both(bool supported) { set_swing_mode_support_(CLIMATE_SWING_BOTH, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20") - void set_supports_swing_mode_vertical(bool supported) { set_swing_mode_support_(CLIMATE_SWING_VERTICAL, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20") - void set_supports_swing_mode_horizontal(bool supported) { - set_swing_mode_support_(CLIMATE_SWING_HORIZONTAL, supported); - } bool supports_swing_mode(ClimateSwingMode swing_mode) const { return this->supported_swing_modes_.count(swing_mode); } bool get_supports_swing_modes() const { return !this->supported_swing_modes_.empty(); } const std::set &get_supported_swing_modes() const { return this->supported_swing_modes_; } From 020cea80b205a6a7104a928f3f5142a09171748d Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Mon, 20 Oct 2025 11:16:50 +1300 Subject: [PATCH 174/336] [nextion] Clean up deprecated code from 1.20 (#11393) --- esphome/components/nextion/nextion.cpp | 3 --- 1 file changed, 3 deletions(-) diff --git a/esphome/components/nextion/nextion.cpp b/esphome/components/nextion/nextion.cpp index 0ce9d02e97..fc152ece1e 100644 --- a/esphome/components/nextion/nextion.cpp +++ b/esphome/components/nextion/nextion.cpp @@ -1291,9 +1291,6 @@ void Nextion::check_pending_waveform_() { void Nextion::set_writer(const nextion_writer_t &writer) { this->writer_ = writer; } -ESPDEPRECATED("set_wait_for_ack(bool) deprecated, no effect", "v1.20") -void Nextion::set_wait_for_ack(bool wait_for_ack) { ESP_LOGE(TAG, "Deprecated"); } - bool Nextion::is_updating() { return this->connection_state_.is_updating_; } } // namespace nextion From 862bbb7fe158ea44d519c8c705b444f928d62ed4 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 19 Oct 2025 13:09:09 -1000 Subject: [PATCH 175/336] [ci] Fix memory impact analysis failing on fork PRs (#11380) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com> --- .../workflows/ci-memory-impact-comment.yml | 108 +++++++++ .github/workflows/ci.yml | 65 ++---- script/ci_add_metadata_to_json.py | 88 ++++++++ script/ci_memory_impact_comment.py | 207 ++++++++++++------ 4 files changed, 358 insertions(+), 110 deletions(-) create mode 100644 .github/workflows/ci-memory-impact-comment.yml create mode 100755 script/ci_add_metadata_to_json.py diff --git a/.github/workflows/ci-memory-impact-comment.yml b/.github/workflows/ci-memory-impact-comment.yml new file mode 100644 index 0000000000..4ce7abfb85 --- /dev/null +++ b/.github/workflows/ci-memory-impact-comment.yml @@ -0,0 +1,108 @@ +--- +name: Memory Impact Comment (Forks) + +on: + workflow_run: + workflows: ["CI"] + types: [completed] + +permissions: + contents: read + pull-requests: write + actions: read + +jobs: + memory-impact-comment: + name: Post memory impact comment (fork PRs only) + runs-on: ubuntu-24.04 + # Only run for PRs from forks that had successful CI runs + if: > + github.event.workflow_run.event == 'pull_request' && + github.event.workflow_run.conclusion == 'success' && + github.event.workflow_run.head_repository.full_name != github.repository + env: + GH_TOKEN: ${{ github.token }} + steps: + - name: Get PR details + id: pr + run: | + # Get PR details by searching for PR with matching head SHA + # The workflow_run.pull_requests field is often empty for forks + head_sha="${{ github.event.workflow_run.head_sha }}" + pr_data=$(gh api "/repos/${{ github.repository }}/commits/$head_sha/pulls" \ + --jq '.[0] | {number: .number, base_ref: .base.ref}') + if [ -z "$pr_data" ] || [ "$pr_data" == "null" ]; then + echo "No PR found for SHA $head_sha, skipping" + echo "skip=true" >> $GITHUB_OUTPUT + exit 0 + fi + + pr_number=$(echo "$pr_data" | jq -r '.number') + base_ref=$(echo "$pr_data" | jq -r '.base_ref') + + echo "pr_number=$pr_number" >> $GITHUB_OUTPUT + echo "base_ref=$base_ref" >> $GITHUB_OUTPUT + echo "Found PR #$pr_number targeting base branch: $base_ref" + + - name: Check out code from base repository + if: steps.pr.outputs.skip != 'true' + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + # Always check out from the base repository (esphome/esphome), never from forks + # Use the PR's target branch to ensure we run trusted code from the main repo + repository: ${{ github.repository }} + ref: ${{ steps.pr.outputs.base_ref }} + + - name: Restore Python + if: steps.pr.outputs.skip != 'true' + uses: ./.github/actions/restore-python + with: + python-version: "3.11" + cache-key: ${{ hashFiles('.cache-key') }} + + - name: Download memory analysis artifacts + if: steps.pr.outputs.skip != 'true' + run: | + run_id="${{ github.event.workflow_run.id }}" + echo "Downloading artifacts from workflow run $run_id" + + mkdir -p memory-analysis + + # Download target analysis artifact + if gh run download --name "memory-analysis-target" --dir memory-analysis --repo "${{ github.repository }}" "$run_id"; then + echo "Downloaded memory-analysis-target artifact." + else + echo "No memory-analysis-target artifact found." 
+ fi + + # Download PR analysis artifact + if gh run download --name "memory-analysis-pr" --dir memory-analysis --repo "${{ github.repository }}" "$run_id"; then + echo "Downloaded memory-analysis-pr artifact." + else + echo "No memory-analysis-pr artifact found." + fi + + - name: Check if artifacts exist + id: check + if: steps.pr.outputs.skip != 'true' + run: | + if [ -f ./memory-analysis/memory-analysis-target.json ] && [ -f ./memory-analysis/memory-analysis-pr.json ]; then + echo "found=true" >> $GITHUB_OUTPUT + else + echo "found=false" >> $GITHUB_OUTPUT + echo "Memory analysis artifacts not found, skipping comment" + fi + + - name: Post or update PR comment + if: steps.pr.outputs.skip != 'true' && steps.check.outputs.found == 'true' + env: + PR_NUMBER: ${{ steps.pr.outputs.pr_number }} + run: | + . venv/bin/activate + # Pass PR number and JSON file paths directly to Python script + # Let Python parse the JSON to avoid shell injection risks + # The script will validate and sanitize all inputs + python script/ci_memory_impact_comment.py \ + --pr-number "$PR_NUMBER" \ + --target-json ./memory-analysis/memory-analysis-target.json \ + --pr-json ./memory-analysis/memory-analysis-pr.json diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 42f934de9d..6f96f2ac14 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -641,6 +641,12 @@ jobs: --output-env \ --output-json memory-analysis-target.json + # Add metadata to JSON before caching + python script/ci_add_metadata_to_json.py \ + --json-file memory-analysis-target.json \ + --components "$components" \ + --platform "$platform" + - name: Save memory analysis to cache if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success' uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 @@ -720,6 +726,13 @@ jobs: python script/ci_memory_impact_extract.py \ --output-env \ --output-json memory-analysis-pr.json + + # Add metadata to JSON (components and platform are in shell variables above) + python script/ci_add_metadata_to_json.py \ + --json-file memory-analysis-pr.json \ + --components "$components" \ + --platform "$platform" + - name: Upload memory analysis JSON uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: @@ -736,10 +749,12 @@ jobs: - determine-jobs - memory-impact-target-branch - memory-impact-pr-branch - if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true' + if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true' permissions: contents: read pull-requests: write + env: + GH_TOKEN: ${{ github.token }} steps: - name: Check out code uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 @@ -762,52 +777,16 @@ jobs: continue-on-error: true - name: Post or update PR comment env: - GH_TOKEN: ${{ github.token }} - COMPONENTS: ${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }} - PLATFORM: ${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }} - TARGET_RAM: ${{ needs.memory-impact-target-branch.outputs.ram_usage }} - TARGET_FLASH: ${{ needs.memory-impact-target-branch.outputs.flash_usage }} - PR_RAM: ${{ 
needs.memory-impact-pr-branch.outputs.ram_usage }} - PR_FLASH: ${{ needs.memory-impact-pr-branch.outputs.flash_usage }} - TARGET_CACHE_HIT: ${{ needs.memory-impact-target-branch.outputs.cache_hit }} + PR_NUMBER: ${{ github.event.pull_request.number }} run: | . venv/bin/activate - # Check if analysis JSON files exist - target_json_arg="" - pr_json_arg="" - - if [ -f ./memory-analysis/memory-analysis-target.json ]; then - echo "Found target analysis JSON" - target_json_arg="--target-json ./memory-analysis/memory-analysis-target.json" - else - echo "No target analysis JSON found" - fi - - if [ -f ./memory-analysis/memory-analysis-pr.json ]; then - echo "Found PR analysis JSON" - pr_json_arg="--pr-json ./memory-analysis/memory-analysis-pr.json" - else - echo "No PR analysis JSON found" - fi - - # Add cache flag if target was cached - cache_flag="" - if [ "$TARGET_CACHE_HIT" == "true" ]; then - cache_flag="--target-cache-hit" - fi - + # Pass JSON file paths directly to Python script + # All data is extracted from JSON files for security python script/ci_memory_impact_comment.py \ - --pr-number "${{ github.event.pull_request.number }}" \ - --components "$COMPONENTS" \ - --platform "$PLATFORM" \ - --target-ram "$TARGET_RAM" \ - --target-flash "$TARGET_FLASH" \ - --pr-ram "$PR_RAM" \ - --pr-flash "$PR_FLASH" \ - $target_json_arg \ - $pr_json_arg \ - $cache_flag + --pr-number "$PR_NUMBER" \ + --target-json ./memory-analysis/memory-analysis-target.json \ + --pr-json ./memory-analysis/memory-analysis-pr.json ci-status: name: CI Status diff --git a/script/ci_add_metadata_to_json.py b/script/ci_add_metadata_to_json.py new file mode 100755 index 0000000000..687b5131c0 --- /dev/null +++ b/script/ci_add_metadata_to_json.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python3 +"""Add metadata to memory analysis JSON file. + +This script adds components and platform metadata to an existing +memory analysis JSON file. Used by CI to ensure all required fields are present +for the comment script. 
+""" + +from __future__ import annotations + +import argparse +import json +from pathlib import Path +import sys + + +def main() -> int: + """Main entry point.""" + parser = argparse.ArgumentParser( + description="Add metadata to memory analysis JSON file" + ) + parser.add_argument( + "--json-file", + required=True, + help="Path to JSON file to update", + ) + parser.add_argument( + "--components", + required=True, + help='JSON array of component names (e.g., \'["api", "wifi"]\')', + ) + parser.add_argument( + "--platform", + required=True, + help="Platform name", + ) + + args = parser.parse_args() + + # Load existing JSON + json_path = Path(args.json_file) + if not json_path.exists(): + print(f"Error: JSON file not found: {args.json_file}", file=sys.stderr) + return 1 + + try: + with open(json_path, encoding="utf-8") as f: + data = json.load(f) + except (json.JSONDecodeError, OSError) as e: + print(f"Error loading JSON: {e}", file=sys.stderr) + return 1 + + # Parse components + try: + components = json.loads(args.components) + if not isinstance(components, list): + print("Error: --components must be a JSON array", file=sys.stderr) + return 1 + # Element-level validation: ensure each component is a non-empty string + for idx, comp in enumerate(components): + if not isinstance(comp, str) or not comp.strip(): + print( + f"Error: component at index {idx} is not a non-empty string: {comp!r}", + file=sys.stderr, + ) + return 1 + except json.JSONDecodeError as e: + print(f"Error parsing components: {e}", file=sys.stderr) + return 1 + + # Add metadata + data["components"] = components + data["platform"] = args.platform + + # Write back + try: + with open(json_path, "w", encoding="utf-8") as f: + json.dump(data, f, indent=2) + print(f"Added metadata to {args.json_file}", file=sys.stderr) + except OSError as e: + print(f"Error writing JSON: {e}", file=sys.stderr) + return 1 + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py index 4e3fbb9086..1331a44d03 100755 --- a/script/ci_memory_impact_comment.py +++ b/script/ci_memory_impact_comment.py @@ -24,6 +24,37 @@ sys.path.insert(0, str(Path(__file__).parent.parent)) # Comment marker to identify our memory impact comments COMMENT_MARKER = "" + +def run_gh_command(args: list[str], operation: str) -> subprocess.CompletedProcess: + """Run a gh CLI command with error handling. 
+ + Args: + args: Command arguments (including 'gh') + operation: Description of the operation for error messages + + Returns: + CompletedProcess result + + Raises: + subprocess.CalledProcessError: If command fails (with detailed error output) + """ + try: + return subprocess.run( + args, + check=True, + capture_output=True, + text=True, + ) + except subprocess.CalledProcessError as e: + print( + f"ERROR: {operation} failed with exit code {e.returncode}", file=sys.stderr + ) + print(f"ERROR: Command: {' '.join(args)}", file=sys.stderr) + print(f"ERROR: stdout: {e.stdout}", file=sys.stderr) + print(f"ERROR: stderr: {e.stderr}", file=sys.stderr) + raise + + # Thresholds for emoji significance indicators (percentage) OVERALL_CHANGE_THRESHOLD = 1.0 # Overall RAM/Flash changes COMPONENT_CHANGE_THRESHOLD = 3.0 # Component breakdown changes @@ -238,7 +269,6 @@ def create_comment_body( pr_analysis: dict | None = None, target_symbols: dict | None = None, pr_symbols: dict | None = None, - target_cache_hit: bool = False, ) -> str: """Create the comment body with memory impact analysis using Jinja2 templates. @@ -253,7 +283,6 @@ def create_comment_body( pr_analysis: Optional component breakdown for PR branch target_symbols: Optional symbol map for target branch pr_symbols: Optional symbol map for PR branch - target_cache_hit: Whether target branch analysis was loaded from cache Returns: Formatted comment body @@ -283,7 +312,6 @@ def create_comment_body( "flash_change": format_change( target_flash, pr_flash, threshold=OVERALL_CHANGE_THRESHOLD ), - "target_cache_hit": target_cache_hit, "component_change_threshold": COMPONENT_CHANGE_THRESHOLD, } @@ -356,7 +384,7 @@ def find_existing_comment(pr_number: str) -> str | None: print(f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr) # Use gh api to get comments directly - this returns the numeric id field - result = subprocess.run( + result = run_gh_command( [ "gh", "api", @@ -364,9 +392,7 @@ def find_existing_comment(pr_number: str) -> str | None: "--jq", ".[] | {id, body}", ], - capture_output=True, - text=True, - check=True, + operation="Get PR comments", ) print( @@ -420,7 +446,8 @@ def update_existing_comment(comment_id: str, comment_body: str) -> None: subprocess.CalledProcessError: If gh command fails """ print(f"DEBUG: Updating existing comment {comment_id}", file=sys.stderr) - result = subprocess.run( + print(f"DEBUG: Comment body length: {len(comment_body)} bytes", file=sys.stderr) + result = run_gh_command( [ "gh", "api", @@ -430,9 +457,7 @@ def update_existing_comment(comment_id: str, comment_body: str) -> None: "-f", f"body={comment_body}", ], - check=True, - capture_output=True, - text=True, + operation="Update PR comment", ) print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr) @@ -448,11 +473,10 @@ def create_new_comment(pr_number: str, comment_body: str) -> None: subprocess.CalledProcessError: If gh command fails """ print(f"DEBUG: Posting new comment on PR #{pr_number}", file=sys.stderr) - result = subprocess.run( + print(f"DEBUG: Comment body length: {len(comment_body)} bytes", file=sys.stderr) + result = run_gh_command( ["gh", "pr", "comment", pr_number, "--body", comment_body], - check=True, - capture_output=True, - text=True, + operation="Create PR comment", ) print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr) @@ -484,80 +508,129 @@ def main() -> int: description="Post or update PR comment with memory impact analysis" ) parser.add_argument("--pr-number", required=True, help="PR number") 
- parser.add_argument( - "--components", - required=True, - help='JSON array of component names (e.g., \'["api", "wifi"]\')', - ) - parser.add_argument("--platform", required=True, help="Platform name") - parser.add_argument( - "--target-ram", type=int, required=True, help="Target branch RAM usage" - ) - parser.add_argument( - "--target-flash", type=int, required=True, help="Target branch flash usage" - ) - parser.add_argument("--pr-ram", type=int, required=True, help="PR branch RAM usage") - parser.add_argument( - "--pr-flash", type=int, required=True, help="PR branch flash usage" - ) parser.add_argument( "--target-json", - help="Optional path to target branch analysis JSON (for detailed analysis)", + required=True, + help="Path to target branch analysis JSON file", ) parser.add_argument( "--pr-json", - help="Optional path to PR branch analysis JSON (for detailed analysis)", - ) - parser.add_argument( - "--target-cache-hit", - action="store_true", - help="Indicates that target branch analysis was loaded from cache", + required=True, + help="Path to PR branch analysis JSON file", ) args = parser.parse_args() - # Parse components from JSON - try: - components = json.loads(args.components) - if not isinstance(components, list): - print("Error: --components must be a JSON array", file=sys.stderr) - sys.exit(1) - except json.JSONDecodeError as e: - print(f"Error parsing --components JSON: {e}", file=sys.stderr) + # Load analysis JSON files (all data comes from JSON for security) + target_data: dict | None = load_analysis_json(args.target_json) + if not target_data: + print("Error: Failed to load target analysis JSON", file=sys.stderr) sys.exit(1) - # Load analysis JSON files - target_analysis = None - pr_analysis = None - target_symbols = None - pr_symbols = None + pr_data: dict | None = load_analysis_json(args.pr_json) + if not pr_data: + print("Error: Failed to load PR analysis JSON", file=sys.stderr) + sys.exit(1) - if args.target_json: - target_data = load_analysis_json(args.target_json) - if target_data and target_data.get("detailed_analysis"): - target_analysis = target_data["detailed_analysis"].get("components") - target_symbols = target_data["detailed_analysis"].get("symbols") + # Extract detailed analysis if available + target_analysis: dict | None = None + pr_analysis: dict | None = None + target_symbols: dict | None = None + pr_symbols: dict | None = None - if args.pr_json: - pr_data = load_analysis_json(args.pr_json) - if pr_data and pr_data.get("detailed_analysis"): - pr_analysis = pr_data["detailed_analysis"].get("components") - pr_symbols = pr_data["detailed_analysis"].get("symbols") + if target_data.get("detailed_analysis"): + target_analysis = target_data["detailed_analysis"].get("components") + target_symbols = target_data["detailed_analysis"].get("symbols") + + if pr_data.get("detailed_analysis"): + pr_analysis = pr_data["detailed_analysis"].get("components") + pr_symbols = pr_data["detailed_analysis"].get("symbols") + + # Extract all values from JSON files (prevents shell injection from PR code) + components = target_data.get("components") + platform = target_data.get("platform") + target_ram = target_data.get("ram_bytes") + target_flash = target_data.get("flash_bytes") + pr_ram = pr_data.get("ram_bytes") + pr_flash = pr_data.get("flash_bytes") + + # Validate required fields and types + missing_fields: list[str] = [] + type_errors: list[str] = [] + + if components is None: + missing_fields.append("components") + elif not isinstance(components, list): + type_errors.append( + 
f"components must be a list, got {type(components).__name__}" + ) + else: + for idx, comp in enumerate(components): + if not isinstance(comp, str): + type_errors.append( + f"components[{idx}] must be a string, got {type(comp).__name__}" + ) + if platform is None: + missing_fields.append("platform") + elif not isinstance(platform, str): + type_errors.append(f"platform must be a string, got {type(platform).__name__}") + + if target_ram is None: + missing_fields.append("target.ram_bytes") + elif not isinstance(target_ram, int): + type_errors.append( + f"target.ram_bytes must be an integer, got {type(target_ram).__name__}" + ) + + if target_flash is None: + missing_fields.append("target.flash_bytes") + elif not isinstance(target_flash, int): + type_errors.append( + f"target.flash_bytes must be an integer, got {type(target_flash).__name__}" + ) + + if pr_ram is None: + missing_fields.append("pr.ram_bytes") + elif not isinstance(pr_ram, int): + type_errors.append( + f"pr.ram_bytes must be an integer, got {type(pr_ram).__name__}" + ) + + if pr_flash is None: + missing_fields.append("pr.flash_bytes") + elif not isinstance(pr_flash, int): + type_errors.append( + f"pr.flash_bytes must be an integer, got {type(pr_flash).__name__}" + ) + + if missing_fields or type_errors: + if missing_fields: + print( + f"Error: JSON files missing required fields: {', '.join(missing_fields)}", + file=sys.stderr, + ) + if type_errors: + print( + f"Error: Type validation failed: {'; '.join(type_errors)}", + file=sys.stderr, + ) + print(f"Target JSON keys: {list(target_data.keys())}", file=sys.stderr) + print(f"PR JSON keys: {list(pr_data.keys())}", file=sys.stderr) + sys.exit(1) # Create comment body # Note: Memory totals (RAM/Flash) are summed across all builds if multiple were run. 
comment_body = create_comment_body( components=components, - platform=args.platform, - target_ram=args.target_ram, - target_flash=args.target_flash, - pr_ram=args.pr_ram, - pr_flash=args.pr_flash, + platform=platform, + target_ram=target_ram, + target_flash=target_flash, + pr_ram=pr_ram, + pr_flash=pr_flash, target_analysis=target_analysis, pr_analysis=pr_analysis, target_symbols=target_symbols, pr_symbols=pr_symbols, - target_cache_hit=args.target_cache_hit, ) # Post or update comment From 0f87e7508b8e9ef1c2bedc0918afef5674cf3e62 Mon Sep 17 00:00:00 2001 From: Juan Antonio Aldea Date: Mon, 20 Oct 2025 01:09:28 +0200 Subject: [PATCH 176/336] remove hexencode due 2022.1 deprecation (#11383) --- esphome/core/helpers.h | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/esphome/core/helpers.h b/esphome/core/helpers.h index 37a64d46b2..234d2a7d7d 100644 --- a/esphome/core/helpers.h +++ b/esphome/core/helpers.h @@ -1158,18 +1158,4 @@ template::value, int> = 0> T &id(T ///@} -/// @name Deprecated functions -///@{ - -ESPDEPRECATED("hexencode() is deprecated, use format_hex_pretty() instead.", "2022.1") -inline std::string hexencode(const uint8_t *data, uint32_t len) { return format_hex_pretty(data, len); } - -template -ESPDEPRECATED("hexencode() is deprecated, use format_hex_pretty() instead.", "2022.1") -std::string hexencode(const T &data) { - return hexencode(data.data(), data.size()); -} - -///@} - } // namespace esphome From a59b1494d80955a73cef001fceca889f94b654b1 Mon Sep 17 00:00:00 2001 From: Javier Peletier Date: Mon, 20 Oct 2025 03:17:16 +0200 Subject: [PATCH 177/336] [substitutions] Recursive substitutions and better jinja error handling and debug help (#10806) --- esphome/components/substitutions/__init__.py | 17 +-- esphome/components/substitutions/jinja.py | 109 +++++++++++++++--- .../02-expressions.approved.yaml | 2 + .../substitutions/02-expressions.input.yaml | 2 + 4 files changed, 100 insertions(+), 30 deletions(-) diff --git a/esphome/components/substitutions/__init__.py b/esphome/components/substitutions/__init__.py index e6bcdc063a..098d56bfad 100644 --- a/esphome/components/substitutions/__init__.py +++ b/esphome/components/substitutions/__init__.py @@ -6,7 +6,7 @@ import esphome.config_validation as cv from esphome.const import CONF_SUBSTITUTIONS, VALID_SUBSTITUTIONS_CHARACTERS from esphome.yaml_util import ESPHomeDataBase, ESPLiteralValue, make_data_base -from .jinja import Jinja, JinjaStr, TemplateError, TemplateRuntimeError, has_jinja +from .jinja import Jinja, JinjaError, JinjaStr, has_jinja CODEOWNERS = ["@esphome/core"] _LOGGER = logging.getLogger(__name__) @@ -57,17 +57,12 @@ def _expand_jinja(value, orig_value, path, jinja, ignore_missing): "->".join(str(x) for x in path), err.message, ) - except ( - TemplateError, - TemplateRuntimeError, - RuntimeError, - ArithmeticError, - AttributeError, - TypeError, - ) as err: + except JinjaError as err: raise cv.Invalid( - f"{type(err).__name__} Error evaluating jinja expression '{value}': {str(err)}." - f" See {'->'.join(str(x) for x in path)}", + f"{err.error_name()} Error evaluating jinja expression '{value}': {str(err.parent())}." 
+ f"\nEvaluation stack: (most recent evaluation last)\n{err.stack_trace_str()}" + f"\nRelevant context:\n{err.context_trace_str()}" + f"\nSee {'->'.join(str(x) for x in path)}", path, ) return value diff --git a/esphome/components/substitutions/jinja.py b/esphome/components/substitutions/jinja.py index e7164d8fff..dde0162993 100644 --- a/esphome/components/substitutions/jinja.py +++ b/esphome/components/substitutions/jinja.py @@ -6,6 +6,8 @@ import re import jinja2 as jinja from jinja2.sandbox import SandboxedEnvironment +from esphome.yaml_util import ESPLiteralValue + TemplateError = jinja.TemplateError TemplateSyntaxError = jinja.TemplateSyntaxError TemplateRuntimeError = jinja.TemplateRuntimeError @@ -26,18 +28,20 @@ def has_jinja(st): return detect_jinja_re.search(st) is not None -# SAFE_GLOBAL_FUNCTIONS defines a allowlist of built-in functions that are considered safe to expose +# SAFE_GLOBALS defines a allowlist of built-in functions or modules that are considered safe to expose # in Jinja templates or other sandboxed evaluation contexts. Only functions that do not allow # arbitrary code execution, file access, or other security risks are included. # # The following functions are considered safe: +# - math: The entire math module is injected, allowing access to mathematical functions like sin, cos, sqrt, etc. # - ord: Converts a character to its Unicode code point integer. # - chr: Converts an integer to its corresponding Unicode character. # - len: Returns the length of a sequence or collection. # # These functions were chosen because they are pure, have no side effects, and do not provide access # to the file system, environment, or other potentially sensitive resources. -SAFE_GLOBAL_FUNCTIONS = { +SAFE_GLOBALS = { + "math": math, # Inject entire math module "ord": ord, "chr": chr, "len": len, @@ -56,22 +60,62 @@ class JinjaStr(str): later in the main substitutions pass. 
""" + Undefined = object() + def __new__(cls, value: str, upvalues=None): - obj = super().__new__(cls, value) - obj.upvalues = upvalues or {} + if isinstance(value, JinjaStr): + base = str(value) + merged = {**value.upvalues, **(upvalues or {})} + else: + base = value + merged = dict(upvalues or {}) + obj = super().__new__(cls, base) + obj.upvalues = merged + obj.result = JinjaStr.Undefined return obj - def __init__(self, value: str, upvalues=None): - self.upvalues = upvalues or {} + +class JinjaError(Exception): + def __init__(self, context_trace: dict, expr: str): + self.context_trace = context_trace + self.eval_stack = [expr] + + def parent(self): + return self.__context__ + + def error_name(self): + return type(self.parent()).__name__ + + def context_trace_str(self): + return "\n".join( + f" {k} = {repr(v)} ({type(v).__name__})" + for k, v in self.context_trace.items() + ) + + def stack_trace_str(self): + return "\n".join( + f" {len(self.eval_stack) - i}: {expr}{i == 0 and ' <-- ' + self.error_name() or ''}" + for i, expr in enumerate(self.eval_stack) + ) -class Jinja: +class TrackerContext(jinja.runtime.Context): + def resolve_or_missing(self, key): + val = super().resolve_or_missing(key) + if isinstance(val, JinjaStr): + self.environment.context_trace[key] = val + val, _ = self.environment.expand(val) + self.environment.context_trace[key] = val + return val + + +class Jinja(SandboxedEnvironment): """ Wraps a Jinja environment """ def __init__(self, context_vars): - self.env = SandboxedEnvironment( + super().__init__( trim_blocks=True, lstrip_blocks=True, block_start_string="<%", @@ -82,13 +126,20 @@ class Jinja: variable_end_string="}", undefined=jinja.StrictUndefined, ) - self.env.add_extension("jinja2.ext.do") - self.env.globals["math"] = math # Inject entire math module + self.context_class = TrackerContext + self.add_extension("jinja2.ext.do") + self.context_trace = {} self.context_vars = {**context_vars} - self.env.globals = { - **self.env.globals, + for k, v in self.context_vars.items(): + if isinstance(v, ESPLiteralValue): + continue + if isinstance(v, str) and not isinstance(v, JinjaStr) and has_jinja(v): + self.context_vars[k] = JinjaStr(v, self.context_vars) + + self.globals = { + **self.globals, **self.context_vars, - **SAFE_GLOBAL_FUNCTIONS, + **SAFE_GLOBALS, } def safe_eval(self, expr): @@ -110,23 +161,43 @@ class Jinja: result = None override_vars = {} if isinstance(content_str, JinjaStr): + if content_str.result is not JinjaStr.Undefined: + return content_str.result, None # If `value` is already a JinjaStr, it means we are trying to evaluate it again # in a parent pass. # Hopefully, all required variables are visible now. override_vars = content_str.upvalues + + old_trace = self.context_trace + self.context_trace = {} try: - template = self.env.from_string(content_str) + template = self.from_string(content_str) result = self.safe_eval(template.render(override_vars)) if isinstance(result, Undefined): - # This happens when the expression is simply an undefined variable. Jinja does not - # raise an exception, instead we get "Undefined". - # Trigger an UndefinedError exception so we skip to below. - print("" + result) + print("" + result) # force a UndefinedError exception except (TemplateSyntaxError, UndefinedError) as err: # `content_str` contains a Jinja expression that refers to a variable that is undefined # in this scope. Perhaps it refers to a root substitution that is not visible yet. 
- # Therefore, return the original `content_str` as a JinjaStr, which contains the variables + # Therefore, return `content_str` as a JinjaStr, which contains the variables # that are actually visible to it at this point to postpone evaluation. return JinjaStr(content_str, {**self.context_vars, **override_vars}), err + except JinjaError as err: + err.context_trace = {**self.context_trace, **err.context_trace} + err.eval_stack.append(content_str) + raise err + except ( + TemplateError, + TemplateRuntimeError, + RuntimeError, + ArithmeticError, + AttributeError, + TypeError, + ) as err: + raise JinjaError(self.context_trace, content_str) from err + finally: + self.context_trace = old_trace + + if isinstance(content_str, JinjaStr): + content_str.result = result return result, None diff --git a/tests/unit_tests/fixtures/substitutions/02-expressions.approved.yaml b/tests/unit_tests/fixtures/substitutions/02-expressions.approved.yaml index 443cba144e..1a51fc44cf 100644 --- a/tests/unit_tests/fixtures/substitutions/02-expressions.approved.yaml +++ b/tests/unit_tests/fixtures/substitutions/02-expressions.approved.yaml @@ -8,6 +8,7 @@ substitutions: area: 25 numberOne: 1 var1: 79 + double_width: 14 test_list: - The area is 56 - 56 @@ -25,3 +26,4 @@ test_list: - ord("a") = 97 - chr(97) = a - len([1,2,3]) = 3 + - width = 7, double_width = 14 diff --git a/tests/unit_tests/fixtures/substitutions/02-expressions.input.yaml b/tests/unit_tests/fixtures/substitutions/02-expressions.input.yaml index 07ad992f1f..4612f581b5 100644 --- a/tests/unit_tests/fixtures/substitutions/02-expressions.input.yaml +++ b/tests/unit_tests/fixtures/substitutions/02-expressions.input.yaml @@ -8,6 +8,7 @@ substitutions: area: 25 numberOne: 1 var1: 79 + double_width: ${width * 2} test_list: - "The area is ${width * height}" @@ -23,3 +24,4 @@ test_list: - ord("a") = ${ ord("a") } - chr(97) = ${ chr(97) } - len([1,2,3]) = ${ len([1,2,3]) } + - width = ${width}, double_width = ${double_width} From 6a183679496fb13e464ec21728e25a200df40238 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 19 Oct 2025 15:26:37 -1000 Subject: [PATCH 178/336] [cli] Add `analyze-memory` command (#11395) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- esphome/__main__.py | 91 +++++++++++ tests/unit_tests/test_main.py | 275 +++++++++++++++++++++++++++++++++- 2 files changed, 358 insertions(+), 8 deletions(-) diff --git a/esphome/__main__.py b/esphome/__main__.py index 982e00f5e1..26e5ae7424 100644 --- a/esphome/__main__.py +++ b/esphome/__main__.py @@ -62,6 +62,40 @@ from esphome.util import ( _LOGGER = logging.getLogger(__name__) +# Special non-component keys that appear in configs +_NON_COMPONENT_KEYS = frozenset( + { + CONF_ESPHOME, + "substitutions", + "packages", + "globals", + "external_components", + "<<", + } +) + + +def detect_external_components(config: ConfigType) -> set[str]: + """Detect external/custom components in the configuration. + + External components are those that appear in the config but are not + part of ESPHome's built-in components and are not special config keys. 
+ + Args: + config: The ESPHome configuration dictionary + + Returns: + A set of external component names + """ + from esphome.analyze_memory.helpers import get_esphome_components + + builtin_components = get_esphome_components() + return { + key + for key in config + if key not in builtin_components and key not in _NON_COMPONENT_KEYS + } + class ArgsProtocol(Protocol): device: list[str] | None @@ -892,6 +926,54 @@ def command_idedata(args: ArgsProtocol, config: ConfigType) -> int: return 0 +def command_analyze_memory(args: ArgsProtocol, config: ConfigType) -> int: + """Analyze memory usage by component. + + This command compiles the configuration and performs memory analysis. + Compilation is fast if sources haven't changed (just relinking). + """ + from esphome import platformio_api + from esphome.analyze_memory.cli import MemoryAnalyzerCLI + + # Always compile to ensure fresh data (fast if no changes - just relinks) + exit_code = write_cpp(config) + if exit_code != 0: + return exit_code + exit_code = compile_program(args, config) + if exit_code != 0: + return exit_code + _LOGGER.info("Successfully compiled program.") + + # Get idedata for analysis + idedata = platformio_api.get_idedata(config) + if idedata is None: + _LOGGER.error("Failed to get IDE data for memory analysis") + return 1 + + firmware_elf = Path(idedata.firmware_elf_path) + + # Extract external components from config + external_components = detect_external_components(config) + _LOGGER.debug("Detected external components: %s", external_components) + + # Perform memory analysis + _LOGGER.info("Analyzing memory usage...") + analyzer = MemoryAnalyzerCLI( + str(firmware_elf), + idedata.objdump_path, + idedata.readelf_path, + external_components, + ) + analyzer.analyze() + + # Generate and display report + report = analyzer.generate_report() + print() + print(report) + + return 0 + + def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None: new_name = args.name for c in new_name: @@ -1007,6 +1089,7 @@ POST_CONFIG_ACTIONS = { "idedata": command_idedata, "rename": command_rename, "discover": command_discover, + "analyze-memory": command_analyze_memory, } SIMPLE_CONFIG_ACTIONS = [ @@ -1292,6 +1375,14 @@ def parse_args(argv): ) parser_rename.add_argument("name", help="The new name for the device.", type=str) + parser_analyze_memory = subparsers.add_parser( + "analyze-memory", + help="Analyze memory usage by component.", + ) + parser_analyze_memory.add_argument( + "configuration", help="Your YAML configuration file(s).", nargs="+" + ) + # Keep backward compatibility with the old command line format of # esphome . 
# diff --git a/tests/unit_tests/test_main.py b/tests/unit_tests/test_main.py index 73dfe359f0..9119c88502 100644 --- a/tests/unit_tests/test_main.py +++ b/tests/unit_tests/test_main.py @@ -17,10 +17,12 @@ from esphome import platformio_api from esphome.__main__ import ( Purpose, choose_upload_log_host, + command_analyze_memory, command_clean_all, command_rename, command_update_all, command_wizard, + detect_external_components, get_port_type, has_ip_address, has_mqtt, @@ -226,13 +228,47 @@ def mock_run_external_process() -> Generator[Mock]: @pytest.fixture -def mock_run_external_command() -> Generator[Mock]: - """Mock run_external_command for testing.""" +def mock_run_external_command_main() -> Generator[Mock]: + """Mock run_external_command in __main__ module (different from platformio_api).""" with patch("esphome.__main__.run_external_command") as mock: mock.return_value = 0 # Default to success yield mock +@pytest.fixture +def mock_write_cpp() -> Generator[Mock]: + """Mock write_cpp for testing.""" + with patch("esphome.__main__.write_cpp") as mock: + mock.return_value = 0 # Default to success + yield mock + + +@pytest.fixture +def mock_compile_program() -> Generator[Mock]: + """Mock compile_program for testing.""" + with patch("esphome.__main__.compile_program") as mock: + mock.return_value = 0 # Default to success + yield mock + + +@pytest.fixture +def mock_get_esphome_components() -> Generator[Mock]: + """Mock get_esphome_components for testing.""" + with patch("esphome.analyze_memory.helpers.get_esphome_components") as mock: + mock.return_value = {"logger", "api", "ota"} + yield mock + + +@pytest.fixture +def mock_memory_analyzer_cli() -> Generator[Mock]: + """Mock MemoryAnalyzerCLI for testing.""" + with patch("esphome.analyze_memory.cli.MemoryAnalyzerCLI") as mock_class: + mock_analyzer = MagicMock() + mock_analyzer.generate_report.return_value = "Mock Memory Report" + mock_class.return_value = mock_analyzer + yield mock_class + + def test_choose_upload_log_host_with_string_default() -> None: """Test with a single string default device.""" setup_core() @@ -839,7 +875,7 @@ def test_upload_program_serial_esp8266_with_file( def test_upload_using_esptool_path_conversion( tmp_path: Path, - mock_run_external_command: Mock, + mock_run_external_command_main: Mock, mock_get_idedata: Mock, ) -> None: """Test upload_using_esptool properly converts Path objects to strings for esptool. 
@@ -875,10 +911,10 @@ def test_upload_using_esptool_path_conversion( assert result == 0 # Verify that run_external_command was called - assert mock_run_external_command.call_count == 1 + assert mock_run_external_command_main.call_count == 1 # Get the actual call arguments - call_args = mock_run_external_command.call_args[0] + call_args = mock_run_external_command_main.call_args[0] # The first argument should be esptool.main function, # followed by the command arguments @@ -917,7 +953,7 @@ def test_upload_using_esptool_path_conversion( def test_upload_using_esptool_with_file_path( tmp_path: Path, - mock_run_external_command: Mock, + mock_run_external_command_main: Mock, ) -> None: """Test upload_using_esptool with a custom file that's a Path object.""" setup_core(platform=PLATFORM_ESP8266, tmp_path=tmp_path, name="test") @@ -934,10 +970,10 @@ def test_upload_using_esptool_with_file_path( assert result == 0 # Verify that run_external_command was called - mock_run_external_command.assert_called_once() + mock_run_external_command_main.assert_called_once() # Get the actual call arguments - call_args = mock_run_external_command.call_args[0] + call_args = mock_run_external_command_main.call_args[0] cmd_list = list(call_args[1:]) # Skip the esptool.main function # Find the firmware path in the command @@ -2273,3 +2309,226 @@ def test_show_logs_api_mqtt_timeout_fallback( # Verify run_logs was called with only the static IP (MQTT failed) mock_run_logs.assert_called_once_with(CORE.config, ["192.168.1.100"]) + + +def test_detect_external_components_no_external( + mock_get_esphome_components: Mock, +) -> None: + """Test detect_external_components with no external components.""" + config = { + CONF_ESPHOME: {CONF_NAME: "test_device"}, + "logger": {}, + "api": {}, + } + + result = detect_external_components(config) + + assert result == set() + mock_get_esphome_components.assert_called_once() + + +def test_detect_external_components_with_external( + mock_get_esphome_components: Mock, +) -> None: + """Test detect_external_components detects external components.""" + config = { + CONF_ESPHOME: {CONF_NAME: "test_device"}, + "logger": {}, # Built-in + "api": {}, # Built-in + "my_custom_sensor": {}, # External + "another_custom": {}, # External + "external_components": [], # Special key, not a component + "substitutions": {}, # Special key, not a component + } + + result = detect_external_components(config) + + assert result == {"my_custom_sensor", "another_custom"} + mock_get_esphome_components.assert_called_once() + + +def test_detect_external_components_filters_special_keys( + mock_get_esphome_components: Mock, +) -> None: + """Test detect_external_components filters out special config keys.""" + config = { + CONF_ESPHOME: {CONF_NAME: "test_device"}, + "substitutions": {"key": "value"}, + "packages": {}, + "globals": [], + "external_components": [], + "<<": {}, # YAML merge key + } + + result = detect_external_components(config) + + assert result == set() + mock_get_esphome_components.assert_called_once() + + +def test_command_analyze_memory_success( + tmp_path: Path, + capfd: CaptureFixture[str], + mock_write_cpp: Mock, + mock_compile_program: Mock, + mock_get_idedata: Mock, + mock_get_esphome_components: Mock, + mock_memory_analyzer_cli: Mock, +) -> None: + """Test command_analyze_memory with successful compilation and analysis.""" + setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device") + + # Create firmware.elf file + firmware_path = ( + tmp_path / ".esphome" / "build" / 
"test_device" / ".pioenvs" / "test_device" + ) + firmware_path.mkdir(parents=True, exist_ok=True) + firmware_elf = firmware_path / "firmware.elf" + firmware_elf.write_text("mock elf file") + + # Mock idedata + mock_idedata_obj = MagicMock(spec=platformio_api.IDEData) + mock_idedata_obj.firmware_elf_path = str(firmware_elf) + mock_idedata_obj.objdump_path = "/path/to/objdump" + mock_idedata_obj.readelf_path = "/path/to/readelf" + mock_get_idedata.return_value = mock_idedata_obj + + config = { + CONF_ESPHOME: {CONF_NAME: "test_device"}, + "logger": {}, + } + + args = MockArgs() + + result = command_analyze_memory(args, config) + + assert result == 0 + + # Verify compilation was done + mock_write_cpp.assert_called_once_with(config) + mock_compile_program.assert_called_once_with(args, config) + + # Verify analyzer was created with correct parameters + mock_memory_analyzer_cli.assert_called_once_with( + str(firmware_elf), + "/path/to/objdump", + "/path/to/readelf", + set(), # No external components + ) + + # Verify analysis was run + mock_analyzer = mock_memory_analyzer_cli.return_value + mock_analyzer.analyze.assert_called_once() + mock_analyzer.generate_report.assert_called_once() + + # Verify report was printed + captured = capfd.readouterr() + assert "Mock Memory Report" in captured.out + + +def test_command_analyze_memory_with_external_components( + tmp_path: Path, + mock_write_cpp: Mock, + mock_compile_program: Mock, + mock_get_idedata: Mock, + mock_get_esphome_components: Mock, + mock_memory_analyzer_cli: Mock, +) -> None: + """Test command_analyze_memory detects external components.""" + setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device") + + # Create firmware.elf file + firmware_path = ( + tmp_path / ".esphome" / "build" / "test_device" / ".pioenvs" / "test_device" + ) + firmware_path.mkdir(parents=True, exist_ok=True) + firmware_elf = firmware_path / "firmware.elf" + firmware_elf.write_text("mock elf file") + + # Mock idedata + mock_idedata_obj = MagicMock(spec=platformio_api.IDEData) + mock_idedata_obj.firmware_elf_path = str(firmware_elf) + mock_idedata_obj.objdump_path = "/path/to/objdump" + mock_idedata_obj.readelf_path = "/path/to/readelf" + mock_get_idedata.return_value = mock_idedata_obj + + config = { + CONF_ESPHOME: {CONF_NAME: "test_device"}, + "logger": {}, + "my_custom_component": {"param": "value"}, # External component + "external_components": [{"source": "github://user/repo"}], # Not a component + } + + args = MockArgs() + + result = command_analyze_memory(args, config) + + assert result == 0 + + # Verify analyzer was created with external components detected + mock_memory_analyzer_cli.assert_called_once_with( + str(firmware_elf), + "/path/to/objdump", + "/path/to/readelf", + {"my_custom_component"}, # External component detected + ) + + +def test_command_analyze_memory_write_cpp_fails( + tmp_path: Path, + mock_write_cpp: Mock, +) -> None: + """Test command_analyze_memory when write_cpp fails.""" + setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device") + + config = {CONF_ESPHOME: {CONF_NAME: "test_device"}} + args = MockArgs() + + mock_write_cpp.return_value = 1 # Failure + + result = command_analyze_memory(args, config) + + assert result == 1 + mock_write_cpp.assert_called_once_with(config) + + +def test_command_analyze_memory_compile_fails( + tmp_path: Path, + mock_write_cpp: Mock, + mock_compile_program: Mock, +) -> None: + """Test command_analyze_memory when compilation fails.""" + setup_core(platform=PLATFORM_ESP32, 
tmp_path=tmp_path, name="test_device") + + config = {CONF_ESPHOME: {CONF_NAME: "test_device"}} + args = MockArgs() + + mock_compile_program.return_value = 1 # Compilation failed + + result = command_analyze_memory(args, config) + + assert result == 1 + mock_write_cpp.assert_called_once_with(config) + mock_compile_program.assert_called_once_with(args, config) + + +def test_command_analyze_memory_no_idedata( + tmp_path: Path, + caplog: pytest.LogCaptureFixture, + mock_write_cpp: Mock, + mock_compile_program: Mock, + mock_get_idedata: Mock, +) -> None: + """Test command_analyze_memory when idedata cannot be retrieved.""" + setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device") + + config = {CONF_ESPHOME: {CONF_NAME: "test_device"}} + args = MockArgs() + + mock_get_idedata.return_value = None # Failed to get idedata + + with caplog.at_level(logging.ERROR): + result = command_analyze_memory(args, config) + + assert result == 1 + assert "Failed to get IDE data for memory analysis" in caplog.text From 11b53096a6544e8332b6dcc4d894b096d3365b64 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 19 Oct 2025 15:58:05 -1000 Subject: [PATCH 179/336] [ci] Fix fork PR workflow failing to find PRs from forks (#11396) --- .../workflows/ci-memory-impact-comment.yml | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci-memory-impact-comment.yml b/.github/workflows/ci-memory-impact-comment.yml index 4ce7abfb85..eea1d2c148 100644 --- a/.github/workflows/ci-memory-impact-comment.yml +++ b/.github/workflows/ci-memory-impact-comment.yml @@ -28,20 +28,23 @@ jobs: run: | # Get PR details by searching for PR with matching head SHA # The workflow_run.pull_requests field is often empty for forks + # Use paginate to handle repos with many open PRs head_sha="${{ github.event.workflow_run.head_sha }}" - pr_data=$(gh api "/repos/${{ github.repository }}/commits/$head_sha/pulls" \ - --jq '.[0] | {number: .number, base_ref: .base.ref}') - if [ -z "$pr_data" ] || [ "$pr_data" == "null" ]; then + pr_data=$(gh api --paginate "/repos/${{ github.repository }}/pulls" \ + --jq ".[] | select(.head.sha == \"$head_sha\") | {number: .number, base_ref: .base.ref}" \ + | head -n 1) + + if [ -z "$pr_data" ]; then echo "No PR found for SHA $head_sha, skipping" - echo "skip=true" >> $GITHUB_OUTPUT + echo "skip=true" >> "$GITHUB_OUTPUT" exit 0 fi pr_number=$(echo "$pr_data" | jq -r '.number') base_ref=$(echo "$pr_data" | jq -r '.base_ref') - echo "pr_number=$pr_number" >> $GITHUB_OUTPUT - echo "base_ref=$base_ref" >> $GITHUB_OUTPUT + echo "pr_number=$pr_number" >> "$GITHUB_OUTPUT" + echo "base_ref=$base_ref" >> "$GITHUB_OUTPUT" echo "Found PR #$pr_number targeting base branch: $base_ref" - name: Check out code from base repository @@ -87,9 +90,9 @@ jobs: if: steps.pr.outputs.skip != 'true' run: | if [ -f ./memory-analysis/memory-analysis-target.json ] && [ -f ./memory-analysis/memory-analysis-pr.json ]; then - echo "found=true" >> $GITHUB_OUTPUT + echo "found=true" >> "$GITHUB_OUTPUT" else - echo "found=false" >> $GITHUB_OUTPUT + echo "found=false" >> "$GITHUB_OUTPUT" echo "Memory analysis artifacts not found, skipping comment" fi From c15f1a9be88be6529b6fb952fd0ec854ceb21502 Mon Sep 17 00:00:00 2001 From: tomaszduda23 Date: Mon, 20 Oct 2025 04:11:44 +0200 Subject: [PATCH 180/336] [nrf52] add missing defines for tests (#11384) Co-authored-by: J. 
Nick Koston --- esphome/core/defines.h | 2 ++ 1 file changed, 2 insertions(+) diff --git a/esphome/core/defines.h b/esphome/core/defines.h index b1bd7f92d7..ff9afb9114 100644 --- a/esphome/core/defines.h +++ b/esphome/core/defines.h @@ -273,6 +273,8 @@ #ifdef USE_NRF52 #define USE_NRF52_DFU +#define USE_SOFTDEVICE_ID 7 +#define USE_SOFTDEVICE_VERSION 1 #endif // Disabled feature flags From 8f1c4634ecfd815629b62e66c2ebb192a4c38bf3 Mon Sep 17 00:00:00 2001 From: Stefan Rado <628587+kroimon@users.noreply.github.com> Date: Mon, 20 Oct 2025 04:49:06 +0200 Subject: [PATCH 181/336] [uponor_smatrix] Use combined 32 bit addresses instead of separate 16 bit system and device addresses (#11066) Co-authored-by: J. Nick Koston --- esphome/components/uponor_smatrix/__init__.py | 23 ++++++++----- .../climate/uponor_smatrix_climate.cpp | 2 +- .../sensor/uponor_smatrix_sensor.cpp | 2 +- .../uponor_smatrix/uponor_smatrix.cpp | 34 ++++++------------- .../uponor_smatrix/uponor_smatrix.h | 14 ++++---- tests/components/uponor_smatrix/common.yaml | 7 ++-- 6 files changed, 37 insertions(+), 45 deletions(-) diff --git a/esphome/components/uponor_smatrix/__init__.py b/esphome/components/uponor_smatrix/__init__.py index d4102d1026..9588b0df7f 100644 --- a/esphome/components/uponor_smatrix/__init__.py +++ b/esphome/components/uponor_smatrix/__init__.py @@ -17,6 +17,12 @@ UponorSmatrixDevice = uponor_smatrix_ns.class_( "UponorSmatrixDevice", cg.Parented.template(UponorSmatrixComponent) ) + +device_address = cv.All( + cv.hex_int, + cv.Range(min=0x1000000, max=0xFFFFFFFF, msg="Expected a 32 bit device address"), +) + CONF_UPONOR_SMATRIX_ID = "uponor_smatrix_id" CONF_TIME_DEVICE_ADDRESS = "time_device_address" @@ -24,9 +30,12 @@ CONFIG_SCHEMA = ( cv.Schema( { cv.GenerateID(): cv.declare_id(UponorSmatrixComponent), - cv.Optional(CONF_ADDRESS): cv.hex_uint16_t, + cv.Optional(CONF_ADDRESS): cv.invalid( + f"The '{CONF_ADDRESS}' option has been removed. " + "Use full 32 bit addresses in the device definitions instead." 
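For anyone migrating, the combined 32-bit address is simply the old 16-bit system address in the high word and the old 16-bit device address in the low word; this is exactly how the common.yaml test further down turns 0x110B and 0xDE13 into 0x110BDE13. A quick illustrative sketch of the migration math (values taken from that test):

    old_system_address = 0x110B
    old_device_address = 0xDE13
    new_address = (old_system_address << 16) | old_device_address
    assert new_address == 0x110BDE13
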
+ ), cv.Optional(CONF_TIME_ID): cv.use_id(time.RealTimeClock), - cv.Optional(CONF_TIME_DEVICE_ADDRESS): cv.hex_uint16_t, + cv.Optional(CONF_TIME_DEVICE_ADDRESS): device_address, } ) .extend(cv.COMPONENT_SCHEMA) @@ -47,7 +56,7 @@ FINAL_VALIDATE_SCHEMA = uart.final_validate_device_schema( UPONOR_SMATRIX_DEVICE_SCHEMA = cv.Schema( { cv.GenerateID(CONF_UPONOR_SMATRIX_ID): cv.use_id(UponorSmatrixComponent), - cv.Required(CONF_ADDRESS): cv.hex_uint16_t, + cv.Required(CONF_ADDRESS): device_address, } ) @@ -58,17 +67,15 @@ async def to_code(config): await cg.register_component(var, config) await uart.register_uart_device(var, config) - if address := config.get(CONF_ADDRESS): - cg.add(var.set_system_address(address)) if time_id := config.get(CONF_TIME_ID): time_ = await cg.get_variable(time_id) cg.add(var.set_time_id(time_)) - if time_device_address := config.get(CONF_TIME_DEVICE_ADDRESS): - cg.add(var.set_time_device_address(time_device_address)) + if time_device_address := config.get(CONF_TIME_DEVICE_ADDRESS): + cg.add(var.set_time_device_address(time_device_address)) async def register_uponor_smatrix_device(var, config): parent = await cg.get_variable(config[CONF_UPONOR_SMATRIX_ID]) cg.add(var.set_parent(parent)) - cg.add(var.set_device_address(config[CONF_ADDRESS])) + cg.add(var.set_address(config[CONF_ADDRESS])) cg.add(parent.register_device(var)) diff --git a/esphome/components/uponor_smatrix/climate/uponor_smatrix_climate.cpp b/esphome/components/uponor_smatrix/climate/uponor_smatrix_climate.cpp index 19a9112c73..8af106dfb7 100644 --- a/esphome/components/uponor_smatrix/climate/uponor_smatrix_climate.cpp +++ b/esphome/components/uponor_smatrix/climate/uponor_smatrix_climate.cpp @@ -10,7 +10,7 @@ static const char *const TAG = "uponor_smatrix.climate"; void UponorSmatrixClimate::dump_config() { LOG_CLIMATE("", "Uponor Smatrix Climate", this); - ESP_LOGCONFIG(TAG, " Device address: 0x%04X", this->address_); + ESP_LOGCONFIG(TAG, " Device address: 0x%08X", this->address_); } void UponorSmatrixClimate::loop() { diff --git a/esphome/components/uponor_smatrix/sensor/uponor_smatrix_sensor.cpp b/esphome/components/uponor_smatrix/sensor/uponor_smatrix_sensor.cpp index a1d0db214f..7ee12edcdb 100644 --- a/esphome/components/uponor_smatrix/sensor/uponor_smatrix_sensor.cpp +++ b/esphome/components/uponor_smatrix/sensor/uponor_smatrix_sensor.cpp @@ -9,7 +9,7 @@ static const char *const TAG = "uponor_smatrix.sensor"; void UponorSmatrixSensor::dump_config() { ESP_LOGCONFIG(TAG, "Uponor Smatrix Sensor\n" - " Device address: 0x%04X", + " Device address: 0x%08X", this->address_); LOG_SENSOR(" ", "Temperature", this->temperature_sensor_); LOG_SENSOR(" ", "External Temperature", this->external_temperature_sensor_); diff --git a/esphome/components/uponor_smatrix/uponor_smatrix.cpp b/esphome/components/uponor_smatrix/uponor_smatrix.cpp index 867305059f..221f07c80e 100644 --- a/esphome/components/uponor_smatrix/uponor_smatrix.cpp +++ b/esphome/components/uponor_smatrix/uponor_smatrix.cpp @@ -18,11 +18,10 @@ void UponorSmatrixComponent::setup() { void UponorSmatrixComponent::dump_config() { ESP_LOGCONFIG(TAG, "Uponor Smatrix"); - ESP_LOGCONFIG(TAG, " System address: 0x%04X", this->address_); #ifdef USE_TIME if (this->time_id_ != nullptr) { ESP_LOGCONFIG(TAG, " Time synchronization: YES"); - ESP_LOGCONFIG(TAG, " Time master device address: 0x%04X", this->time_device_address_); + ESP_LOGCONFIG(TAG, " Time master device address: 0x%08X", this->time_device_address_); } #endif @@ -31,7 +30,7 @@ void 
UponorSmatrixComponent::dump_config() { if (!this->unknown_devices_.empty()) { ESP_LOGCONFIG(TAG, " Detected unknown device addresses:"); for (auto device_address : this->unknown_devices_) { - ESP_LOGCONFIG(TAG, " 0x%04X", device_address); + ESP_LOGCONFIG(TAG, " 0x%08X", device_address); } } } @@ -89,8 +88,7 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) { return false; } - uint16_t system_address = encode_uint16(packet[0], packet[1]); - uint16_t device_address = encode_uint16(packet[2], packet[3]); + uint32_t device_address = encode_uint32(packet[0], packet[1], packet[2], packet[3]); uint16_t crc = encode_uint16(packet[packet_len - 1], packet[packet_len - 2]); uint16_t computed_crc = crc16(packet, packet_len - 2); @@ -99,24 +97,14 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) { return false; } - ESP_LOGV(TAG, "Received packet: sys=%04X, dev=%04X, data=%s, crc=%04X", system_address, device_address, + ESP_LOGV(TAG, "Received packet: addr=%08X, data=%s, crc=%04X", device_address, format_hex(&packet[4], packet_len - 6).c_str(), crc); - // Detect or check system address - if (this->address_ == 0) { - ESP_LOGI(TAG, "Using detected system address 0x%04X", system_address); - this->address_ = system_address; - } else if (this->address_ != system_address) { - // This should never happen except if the system address was set or detected incorrectly, so warn the user. - ESP_LOGW(TAG, "Received packet from unknown system address 0x%04X", system_address); - return true; - } - // Handle packet size_t data_len = (packet_len - 6) / 3; if (data_len == 0) { if (packet[4] == UPONOR_ID_REQUEST) - ESP_LOGVV(TAG, "Ignoring request packet for device 0x%04X", device_address); + ESP_LOGVV(TAG, "Ignoring request packet for device 0x%08X", device_address); return true; } @@ -141,7 +129,7 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) { if (data[i].id == UPONOR_ID_DATETIME1) found_time = true; if (found_temperature && found_time) { - ESP_LOGI(TAG, "Using detected time device address 0x%04X", device_address); + ESP_LOGI(TAG, "Using detected time device address 0x%08X", device_address); this->time_device_address_ = device_address; break; } @@ -160,7 +148,7 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) { // Log unknown device addresses if (!found && !this->unknown_devices_.count(device_address)) { - ESP_LOGI(TAG, "Received packet for unknown device address 0x%04X ", device_address); + ESP_LOGI(TAG, "Received packet for unknown device address 0x%08X ", device_address); this->unknown_devices_.insert(device_address); } @@ -168,16 +156,16 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) { return true; } -bool UponorSmatrixComponent::send(uint16_t device_address, const UponorSmatrixData *data, size_t data_len) { - if (this->address_ == 0 || device_address == 0 || data == nullptr || data_len == 0) +bool UponorSmatrixComponent::send(uint32_t device_address, const UponorSmatrixData *data, size_t data_len) { + if (device_address == 0 || data == nullptr || data_len == 0) return false; // Assemble packet for send queue. All fields are big-endian except for the little-endian checksum. 
std::vector packet; packet.reserve(6 + 3 * data_len); - packet.push_back(this->address_ >> 8); - packet.push_back(this->address_ >> 0); + packet.push_back(device_address >> 24); + packet.push_back(device_address >> 16); packet.push_back(device_address >> 8); packet.push_back(device_address >> 0); diff --git a/esphome/components/uponor_smatrix/uponor_smatrix.h b/esphome/components/uponor_smatrix/uponor_smatrix.h index e3e19a12fc..bd760f0d77 100644 --- a/esphome/components/uponor_smatrix/uponor_smatrix.h +++ b/esphome/components/uponor_smatrix/uponor_smatrix.h @@ -71,23 +71,21 @@ class UponorSmatrixComponent : public uart::UARTDevice, public Component { void dump_config() override; void loop() override; - void set_system_address(uint16_t address) { this->address_ = address; } void register_device(UponorSmatrixDevice *device) { this->devices_.push_back(device); } - bool send(uint16_t device_address, const UponorSmatrixData *data, size_t data_len); + bool send(uint32_t device_address, const UponorSmatrixData *data, size_t data_len); #ifdef USE_TIME void set_time_id(time::RealTimeClock *time_id) { this->time_id_ = time_id; } - void set_time_device_address(uint16_t address) { this->time_device_address_ = address; } + void set_time_device_address(uint32_t address) { this->time_device_address_ = address; } void send_time() { this->send_time_requested_ = true; } #endif protected: bool parse_byte_(uint8_t byte); - uint16_t address_; std::vector devices_; - std::set unknown_devices_; + std::set unknown_devices_; std::vector rx_buffer_; std::queue> tx_queue_; @@ -96,7 +94,7 @@ class UponorSmatrixComponent : public uart::UARTDevice, public Component { #ifdef USE_TIME time::RealTimeClock *time_id_{nullptr}; - uint16_t time_device_address_; + uint32_t time_device_address_; bool send_time_requested_; bool do_send_time_(); #endif @@ -104,7 +102,7 @@ class UponorSmatrixComponent : public uart::UARTDevice, public Component { class UponorSmatrixDevice : public Parented { public: - void set_device_address(uint16_t address) { this->address_ = address; } + void set_address(uint32_t address) { this->address_ = address; } virtual void on_device_data(const UponorSmatrixData *data, size_t data_len) = 0; bool send(const UponorSmatrixData *data, size_t data_len) { @@ -113,7 +111,7 @@ class UponorSmatrixDevice : public Parented { protected: friend UponorSmatrixComponent; - uint16_t address_; + uint32_t address_; }; inline float raw_to_celsius(uint16_t raw) { diff --git a/tests/components/uponor_smatrix/common.yaml b/tests/components/uponor_smatrix/common.yaml index 786a604aec..7bb5e952ad 100644 --- a/tests/components/uponor_smatrix/common.yaml +++ b/tests/components/uponor_smatrix/common.yaml @@ -11,18 +11,17 @@ time: - 192.168.178.1 uponor_smatrix: - address: 0x110B time_id: sntp_time - time_device_address: 0xDE13 + time_device_address: 0x110BDE13 climate: - platform: uponor_smatrix - address: 0xDE13 + address: 0x110BDE13 name: Thermostat Living Room sensor: - platform: uponor_smatrix - address: 0xDE13 + address: 0x110BDE13 humidity: name: Thermostat Humidity Living Room temperature: From 22fec4329f30877c69b1e43c984f4692f5b97a2f Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Mon, 20 Oct 2025 16:02:03 +1300 Subject: [PATCH 182/336] [fan] Clean up deprecated code from 2022.2 (#11392) --- esphome/components/fan/__init__.py | 1 - esphome/components/fan/automation.h | 4 ++-- esphome/components/fan/fan_state.cpp | 16 ------------- esphome/components/fan/fan_state.h | 34 
---------------------------- esphome/components/mqtt/mqtt_fan.h | 2 +- esphome/core/application.h | 2 +- esphome/core/controller.h | 2 +- 7 files changed, 5 insertions(+), 56 deletions(-) delete mode 100644 esphome/components/fan/fan_state.cpp delete mode 100644 esphome/components/fan/fan_state.h diff --git a/esphome/components/fan/__init__.py b/esphome/components/fan/__init__.py index da8bf850c7..245c9f04b4 100644 --- a/esphome/components/fan/__init__.py +++ b/esphome/components/fan/__init__.py @@ -38,7 +38,6 @@ IS_PLATFORM_COMPONENT = True fan_ns = cg.esphome_ns.namespace("fan") Fan = fan_ns.class_("Fan", cg.EntityBase) -FanState = fan_ns.class_("Fan", Fan, cg.Component) FanDirection = fan_ns.enum("FanDirection", is_class=True) FAN_DIRECTION_ENUM = { diff --git a/esphome/components/fan/automation.h b/esphome/components/fan/automation.h index d480a2ef44..90661c307c 100644 --- a/esphome/components/fan/automation.h +++ b/esphome/components/fan/automation.h @@ -1,8 +1,8 @@ #pragma once -#include "esphome/core/component.h" #include "esphome/core/automation.h" -#include "fan_state.h" +#include "esphome/core/component.h" +#include "fan.h" namespace esphome { namespace fan { diff --git a/esphome/components/fan/fan_state.cpp b/esphome/components/fan/fan_state.cpp deleted file mode 100644 index 7c1658fb2e..0000000000 --- a/esphome/components/fan/fan_state.cpp +++ /dev/null @@ -1,16 +0,0 @@ -#include "fan_state.h" - -namespace esphome { -namespace fan { - -static const char *const TAG = "fan"; - -void FanState::setup() { - auto restore = this->restore_state_(); - if (restore) - restore->to_call(*this).perform(); -} -float FanState::get_setup_priority() const { return setup_priority::DATA - 1.0f; } - -} // namespace fan -} // namespace esphome diff --git a/esphome/components/fan/fan_state.h b/esphome/components/fan/fan_state.h deleted file mode 100644 index 5926e700b0..0000000000 --- a/esphome/components/fan/fan_state.h +++ /dev/null @@ -1,34 +0,0 @@ -#pragma once - -#include "esphome/core/component.h" -#include "fan.h" - -namespace esphome { -namespace fan { - -enum ESPDEPRECATED("LegacyFanDirection members are deprecated, use FanDirection instead.", - "2022.2") LegacyFanDirection { - FAN_DIRECTION_FORWARD = 0, - FAN_DIRECTION_REVERSE = 1 -}; - -class ESPDEPRECATED("FanState is deprecated, use Fan instead.", "2022.2") FanState : public Fan, public Component { - public: - FanState() = default; - - /// Get the traits of this fan. - FanTraits get_traits() override { return this->traits_; } - /// Set the traits of this fan (i.e. what features it supports). 
- void set_traits(const FanTraits &traits) { this->traits_ = traits; } - - void setup() override; - float get_setup_priority() const override; - - protected: - void control(const FanCall &call) override { this->publish_state(); } - - FanTraits traits_{}; -}; - -} // namespace fan -} // namespace esphome diff --git a/esphome/components/mqtt/mqtt_fan.h b/esphome/components/mqtt/mqtt_fan.h index fdcec0782d..78641d224f 100644 --- a/esphome/components/mqtt/mqtt_fan.h +++ b/esphome/components/mqtt/mqtt_fan.h @@ -5,7 +5,7 @@ #ifdef USE_MQTT #ifdef USE_FAN -#include "esphome/components/fan/fan_state.h" +#include "esphome/components/fan/fan.h" #include "mqtt_component.h" namespace esphome { diff --git a/esphome/core/application.h b/esphome/core/application.h index 6e7f1b49f2..29a734f000 100644 --- a/esphome/core/application.h +++ b/esphome/core/application.h @@ -39,7 +39,7 @@ #include "esphome/components/text_sensor/text_sensor.h" #endif #ifdef USE_FAN -#include "esphome/components/fan/fan_state.h" +#include "esphome/components/fan/fan.h" #endif #ifdef USE_CLIMATE #include "esphome/components/climate/climate.h" diff --git a/esphome/core/controller.h b/esphome/core/controller.h index 1a5b9ea6b4..b475e326ee 100644 --- a/esphome/core/controller.h +++ b/esphome/core/controller.h @@ -5,7 +5,7 @@ #include "esphome/components/binary_sensor/binary_sensor.h" #endif #ifdef USE_FAN -#include "esphome/components/fan/fan_state.h" +#include "esphome/components/fan/fan.h" #endif #ifdef USE_LIGHT #include "esphome/components/light/light_state.h" From dd732dd1554d7f70dbee4fe9d5c0e2a72a1a94b7 Mon Sep 17 00:00:00 2001 From: Clyde Stubbs <2366188+clydebarrow@users.noreply.github.com> Date: Mon, 20 Oct 2025 13:09:36 +1000 Subject: [PATCH 183/336] [mipi_rgb] Add Waveshare 5" 1024x600 (#11206) --- esphome/components/mipi_rgb/models/waveshare.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/esphome/components/mipi_rgb/models/waveshare.py b/esphome/components/mipi_rgb/models/waveshare.py index a38493e816..0fc765fd52 100644 --- a/esphome/components/mipi_rgb/models/waveshare.py +++ b/esphome/components/mipi_rgb/models/waveshare.py @@ -30,6 +30,19 @@ wave_4_3 = DriverChip( "blue": [14, 38, 18, 17, 10], }, ) + +wave_4_3.extend( + "WAVESHARE-5-1024X600", + width=1024, + height=600, + hsync_back_porch=145, + hsync_front_porch=170, + hsync_pulse_width=30, + vsync_back_porch=23, + vsync_front_porch=12, + vsync_pulse_width=2, +) + wave_4_3.extend( "ESP32-S3-TOUCH-LCD-7-800X480", enable_pin=[{"ch422g": None, "number": 2}, {"ch422g": None, "number": 6}], From 255b5a3abdc435f7442a3e64d13e6ec18f7fb92b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 19 Oct 2025 17:13:08 -1000 Subject: [PATCH 184/336] [ci] Skip memory analysis when only Python/config files change in core (#11397) --- script/determine-jobs.py | 20 ++++++++++------- tests/script/test_determine_jobs.py | 33 +++++++++++++++++++++++++++-- 2 files changed, 43 insertions(+), 10 deletions(-) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index 570b1a762c..a0e04a256e 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -273,6 +273,9 @@ def detect_memory_impact_config( building a merged configuration with all changed components (like test_build_components.py does) to get comprehensive memory analysis. + For core C++ file changes without component changes, runs a fallback + analysis using a representative component to measure the impact. 
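In other words, only C++ sources in core esphome/ paths count toward the fallback analysis now; Python-only or config-only core changes skip it entirely, and the new tests below exercise both branches. A rough sketch of the rule, with the extension tuple assumed for illustration (the real CPP_FILE_EXTENSIONS constant is defined elsewhere in the script, and files that map to a component are routed separately before this check):

    CPP_FILE_EXTENSIONS = (".cpp", ".h")  # assumed subset for this sketch

    def is_core_cpp_change(path: str) -> bool:
        # Core (non-component) files only matter for memory impact if they are C++.
        return path.startswith("esphome/") and path.endswith(CPP_FILE_EXTENSIONS)

    is_core_cpp_change("esphome/core/helpers.cpp")  # True  -> fallback analysis on the representative component
    is_core_cpp_change("esphome/config.py")         # False -> should_run stays "false"
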
+ Args: branch: Branch to compare against @@ -289,7 +292,7 @@ def detect_memory_impact_config( # Find all changed components (excluding core and base bus components) changed_component_set: set[str] = set() - has_core_changes = False + has_core_cpp_changes = False for file in files: component = get_component_from_path(file) @@ -297,22 +300,23 @@ def detect_memory_impact_config( # Skip base bus components as they're used across many builds if component not in BASE_BUS_COMPONENTS: changed_component_set.add(component) - elif file.startswith("esphome/"): - # Core ESPHome files changed (not component-specific) - has_core_changes = True + elif file.startswith("esphome/") and file.endswith(CPP_FILE_EXTENSIONS): + # Core ESPHome C++ files changed (not component-specific) + # Only C++ files affect memory usage + has_core_cpp_changes = True - # If no components changed but core changed, test representative component + # If no components changed but core C++ changed, test representative component force_fallback_platform = False - if not changed_component_set and has_core_changes: + if not changed_component_set and has_core_cpp_changes: print( - f"Memory impact: No components changed, but core files changed. " + f"Memory impact: No components changed, but core C++ files changed. " f"Testing {MEMORY_IMPACT_FALLBACK_COMPONENT} component on {MEMORY_IMPACT_FALLBACK_PLATFORM}.", file=sys.stderr, ) changed_component_set.add(MEMORY_IMPACT_FALLBACK_COMPONENT) force_fallback_platform = True # Use fallback platform (most representative) elif not changed_component_set: - # No components and no core changes + # No components and no core C++ changes return {"should_run": "false"} # Find components that have tests and collect their supported platforms diff --git a/tests/script/test_determine_jobs.py b/tests/script/test_determine_jobs.py index b479fc03c5..7587dbee69 100644 --- a/tests/script/test_determine_jobs.py +++ b/tests/script/test_determine_jobs.py @@ -545,7 +545,7 @@ def test_detect_memory_impact_config_with_common_platform(tmp_path: Path) -> Non def test_detect_memory_impact_config_core_only_changes(tmp_path: Path) -> None: - """Test memory impact detection with core-only changes (no component changes).""" + """Test memory impact detection with core C++ changes (no component changes).""" # Create test directory structure with fallback component tests_dir = tmp_path / "tests" / "components" @@ -554,7 +554,7 @@ def test_detect_memory_impact_config_core_only_changes(tmp_path: Path) -> None: api_dir.mkdir(parents=True) (api_dir / "test.esp32-idf.yaml").write_text("test: api") - # Mock changed_files to return only core files (no component files) + # Mock changed_files to return only core C++ files (no component files) with ( patch.object(determine_jobs, "root_path", str(tmp_path)), patch.object(helpers, "root_path", str(tmp_path)), @@ -574,6 +574,35 @@ def test_detect_memory_impact_config_core_only_changes(tmp_path: Path) -> None: assert result["use_merged_config"] == "true" +def test_detect_memory_impact_config_core_python_only_changes(tmp_path: Path) -> None: + """Test that Python-only core changes don't trigger memory impact analysis.""" + # Create test directory structure with fallback component + tests_dir = tmp_path / "tests" / "components" + + # api component (fallback component) with esp32-idf test + api_dir = tests_dir / "api" + api_dir.mkdir(parents=True) + (api_dir / "test.esp32-idf.yaml").write_text("test: api") + + # Mock changed_files to return only core Python files (no C++ files) + with ( + 
patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/__main__.py", + "esphome/config.py", + "esphome/core/config.py", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + # Python-only changes should NOT trigger memory impact analysis + assert result["should_run"] == "false" + + def test_detect_memory_impact_config_no_common_platform(tmp_path: Path) -> None: """Test memory impact detection when components have no common platform.""" # Create test directory structure From c00977df54bbb158dd66dd00d4320b55cfbcb24e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 19 Oct 2025 17:27:04 -1000 Subject: [PATCH 185/336] [climate] Add basic compile tests for climate component (#11404) --- tests/components/climate/common.yaml | 31 +++++++++++++++++++ .../components/climate/test.esp8266-ard.yaml | 1 + 2 files changed, 32 insertions(+) create mode 100644 tests/components/climate/common.yaml create mode 100644 tests/components/climate/test.esp8266-ard.yaml diff --git a/tests/components/climate/common.yaml b/tests/components/climate/common.yaml new file mode 100644 index 0000000000..ff405b68e2 --- /dev/null +++ b/tests/components/climate/common.yaml @@ -0,0 +1,31 @@ +switch: + - platform: template + id: climate_heater_switch + optimistic: true + - platform: template + id: climate_cooler_switch + optimistic: true + +sensor: + - platform: template + id: climate_temperature_sensor + lambda: |- + return 21.5; + update_interval: 60s + +climate: + - platform: bang_bang + id: climate_test_climate + name: Test Climate + sensor: climate_temperature_sensor + default_target_temperature_low: 18°C + default_target_temperature_high: 24°C + idle_action: + - switch.turn_off: climate_heater_switch + - switch.turn_off: climate_cooler_switch + cool_action: + - switch.turn_on: climate_cooler_switch + - switch.turn_off: climate_heater_switch + heat_action: + - switch.turn_on: climate_heater_switch + - switch.turn_off: climate_cooler_switch diff --git a/tests/components/climate/test.esp8266-ard.yaml b/tests/components/climate/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/climate/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml From 1b38518c6373a16daae8c4b1081591770dfab51e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 19 Oct 2025 17:45:44 -1000 Subject: [PATCH 186/336] [tests] Fix flaky test_noise_corrupt_encrypted_frame integration test (#11405) --- tests/integration/test_oversized_payloads.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/integration/test_oversized_payloads.py b/tests/integration/test_oversized_payloads.py index ba18e3d348..8bf890261a 100644 --- a/tests/integration/test_oversized_payloads.py +++ b/tests/integration/test_oversized_payloads.py @@ -281,8 +281,12 @@ async def test_noise_corrupt_encrypted_frame( # Check for signs that the process exited/crashed if "Segmentation fault" in line or "core dumped" in line: process_exited = True - # Check for the expected warning about decryption failure + # Check for the expected log about decryption failure + # This can appear as either a VV-level log from noise or a W-level log from connection if ( + "[VV][api.noise" in line + and "noise_cipherstate_decrypt failed: MAC_FAILURE" in line + ) or ( "[W][api.connection" in line and "Reading failed CIPHERSTATE_DECRYPT_FAILED" in line ): @@ -322,9 +326,9 @@ async def test_noise_corrupt_encrypted_frame( assert not process_exited, ( "ESPHome process should not crash on corrupt encrypted frames" ) - # Verify we saw the expected warning message + # Verify we saw the expected log message about decryption failure assert cipherstate_failed, ( - "Expected to see warning about CIPHERSTATE_DECRYPT_FAILED" + "Expected to see log about noise_cipherstate_decrypt failure or CIPHERSTATE_DECRYPT_FAILED" ) # Verify we can still reconnect after handling the corrupt frame From ae8336c26802fe5798a9dadd691d02edfb33564c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 19 Oct 2025 17:58:03 -1000 Subject: [PATCH 187/336] [esp32][ci] Fix IRAM overflow in grouped component tests for ESP32-IDF (#11386) --- esphome/components/esp32/__init__.py | 10 +++ esphome/components/esp32/iram_fix.py.script | 71 +++++++++++++++++++++ 2 files changed, 81 insertions(+) create mode 100644 esphome/components/esp32/iram_fix.py.script diff --git a/esphome/components/esp32/__init__.py b/esphome/components/esp32/__init__.py index e5725200a6..af84692615 100644 --- a/esphome/components/esp32/__init__.py +++ b/esphome/components/esp32/__init__.py @@ -779,6 +779,16 @@ async def to_code(config): Path(__file__).parent / "post_build.py.script", ) + # In testing mode, add IRAM fix script to allow linking grouped component tests + # Similar to ESP8266's approach but for ESP-IDF + if CORE.testing_mode: + cg.add_build_flag("-DESPHOME_TESTING_MODE") + add_extra_script( + "pre", + "iram_fix.py", + Path(__file__).parent / "iram_fix.py.script", + ) + if conf[CONF_TYPE] == FRAMEWORK_ESP_IDF: cg.add_platformio_option("framework", "espidf") cg.add_build_flag("-DUSE_ESP_IDF") diff --git a/esphome/components/esp32/iram_fix.py.script b/esphome/components/esp32/iram_fix.py.script new file mode 100644 index 0000000000..0d23f9a81b --- /dev/null +++ b/esphome/components/esp32/iram_fix.py.script @@ -0,0 +1,71 @@ +import os +import re + +# pylint: disable=E0602 +Import("env") # noqa + +# IRAM size for testing mode (2MB - large enough to accommodate grouped tests) +TESTING_IRAM_SIZE = 0x200000 + + +def patch_idf_linker_script(source, target, env): + """Patch ESP-IDF linker script to increase IRAM size for testing mode.""" + # Check if we're in testing mode by looking for the define + build_flags = env.get("BUILD_FLAGS", []) + testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) + + 
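    # Note on the rewrite performed further down in this script: a generated
    # memory.ld entry of the form shown in the example above, e.g.
    #   iram0_0_seg (RX) : org = 0x40080000, len = 0x20000 + 0x0
    # is rewritten so that only the `len = ...` expression becomes
    # TESTING_IRAM_SIZE (0x200000); the origin and everything up to and
    # including `len = ` are preserved by the regex capturing groups.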
if not testing_mode: + return + + # For ESP-IDF, the linker scripts are generated in the build directory + build_dir = env.subst("$BUILD_DIR") + + # The memory.ld file is directly in the build directory + memory_ld = os.path.join(build_dir, "memory.ld") + + if not os.path.exists(memory_ld): + print(f"ESPHome: Warning - could not find linker script at {memory_ld}") + return + + try: + with open(memory_ld, "r") as f: + content = f.read() + except OSError as e: + print(f"ESPHome: Error reading linker script: {e}") + return + + # Check if this file contains iram0_0_seg + if 'iram0_0_seg' not in content: + print(f"ESPHome: Warning - iram0_0_seg not found in {memory_ld}") + return + + # Look for iram0_0_seg definition and increase its length + # ESP-IDF format can be: + # iram0_0_seg (RX) : org = 0x40080000, len = 0x20000 + 0x0 + # or more complex with nested parentheses: + # iram0_0_seg (RX) : org = (0x40370000 + 0x4000), len = (((0x403CB700 - (0x40378000 - 0x3FC88000)) - 0x3FC88000) + 0x8000 - 0x4000) + # We want to change len to TESTING_IRAM_SIZE for testing + + # Use a more robust approach: find the line and manually parse it + lines = content.split('\n') + for i, line in enumerate(lines): + if 'iram0_0_seg' in line and 'len' in line: + # Find the position of "len = " and replace everything after it until the end of the statement + match = re.search(r'(iram0_0_seg\s*\([^)]*\)\s*:\s*org\s*=\s*(?:\([^)]+\)|0x[0-9a-fA-F]+)\s*,\s*len\s*=\s*)(.+?)(\s*)$', line) + if match: + lines[i] = f"{match.group(1)}{TESTING_IRAM_SIZE:#x}{match.group(3)}" + break + + updated = '\n'.join(lines) + + if updated != content: + with open(memory_ld, "w") as f: + f.write(updated) + print(f"ESPHome: Patched IRAM size to {TESTING_IRAM_SIZE:#x} in {memory_ld} for testing mode") + else: + print(f"ESPHome: Warning - could not patch iram0_0_seg in {memory_ld}") + + +# Hook into the build process before linking +# For ESP-IDF, we need to run this after the linker scripts are generated +env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_idf_linker_script) From 319ba4a504001bd0db0708cf3628b6c971c255c3 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Mon, 20 Oct 2025 17:03:09 +1300 Subject: [PATCH 188/336] [cover] Clean up deprecated functions from 2021.9 (#11391) --- esphome/components/cover/cover.cpp | 18 ++---------------- esphome/components/cover/cover.h | 20 +------------------- 2 files changed, 3 insertions(+), 35 deletions(-) diff --git a/esphome/components/cover/cover.cpp b/esphome/components/cover/cover.cpp index 3378279371..654bb956a5 100644 --- a/esphome/components/cover/cover.cpp +++ b/esphome/components/cover/cover.cpp @@ -1,6 +1,6 @@ #include "cover.h" -#include "esphome/core/log.h" #include +#include "esphome/core/log.h" namespace esphome { namespace cover { @@ -144,21 +144,7 @@ CoverCall &CoverCall::set_stop(bool stop) { bool CoverCall::get_stop() const { return this->stop_; } CoverCall Cover::make_call() { return {this}; } -void Cover::open() { - auto call = this->make_call(); - call.set_command_open(); - call.perform(); -} -void Cover::close() { - auto call = this->make_call(); - call.set_command_close(); - call.perform(); -} -void Cover::stop() { - auto call = this->make_call(); - call.set_command_stop(); - call.perform(); -} + void Cover::add_on_state_callback(std::function &&f) { this->state_callback_.add(std::move(f)); } void Cover::publish_state(bool save) { this->position = clamp(this->position, 0.0f, 1.0f); diff --git a/esphome/components/cover/cover.h 
b/esphome/components/cover/cover.h index ada5953d57..d5db6cfb4f 100644 --- a/esphome/components/cover/cover.h +++ b/esphome/components/cover/cover.h @@ -4,6 +4,7 @@ #include "esphome/core/entity_base.h" #include "esphome/core/helpers.h" #include "esphome/core/preferences.h" + #include "cover_traits.h" namespace esphome { @@ -125,25 +126,6 @@ class Cover : public EntityBase, public EntityBase_DeviceClass { /// Construct a new cover call used to control the cover. CoverCall make_call(); - /** Open the cover. - * - * This is a legacy method and may be removed later, please use `.make_call()` instead. - */ - ESPDEPRECATED("open() is deprecated, use make_call().set_command_open().perform() instead.", "2021.9") - void open(); - /** Close the cover. - * - * This is a legacy method and may be removed later, please use `.make_call()` instead. - */ - ESPDEPRECATED("close() is deprecated, use make_call().set_command_close().perform() instead.", "2021.9") - void close(); - /** Stop the cover. - * - * This is a legacy method and may be removed later, please use `.make_call()` instead. - * As per solution from issue #2885 the call should include perform() - */ - ESPDEPRECATED("stop() is deprecated, use make_call().set_command_stop().perform() instead.", "2021.9") - void stop(); void add_on_state_callback(std::function &&f); From 118b1d8593d7ca59ace87e7838eb0b8f014edd68 Mon Sep 17 00:00:00 2001 From: Grant Le Roux Date: Mon, 20 Oct 2025 12:05:05 +0800 Subject: [PATCH 189/336] MQTT Light - Min/Max Color Temperature (#11103) Co-authored-by: Cram42 <5396871+cram42@users.noreply.github.com> Co-authored-by: J. Nick Koston --- esphome/components/mqtt/mqtt_light.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/esphome/components/mqtt/mqtt_light.cpp b/esphome/components/mqtt/mqtt_light.cpp index 4f5ff408a4..883b67ffc6 100644 --- a/esphome/components/mqtt/mqtt_light.cpp +++ b/esphome/components/mqtt/mqtt_light.cpp @@ -69,6 +69,12 @@ void MQTTJSONLightComponent::send_discovery(JsonObject root, mqtt::SendDiscovery if (traits.supports_color_capability(ColorCapability::BRIGHTNESS)) root["brightness"] = true; + if (traits.supports_color_mode(ColorMode::COLOR_TEMPERATURE) || + traits.supports_color_mode(ColorMode::COLD_WARM_WHITE)) { + root[MQTT_MIN_MIREDS] = traits.get_min_mireds(); + root[MQTT_MAX_MIREDS] = traits.get_max_mireds(); + } + if (this->state_->supports_effects()) { root["effect"] = true; JsonArray effect_list = root[MQTT_EFFECT_LIST].to(); From 6f5e36ffc331425ca36323692fe4739c16d71625 Mon Sep 17 00:00:00 2001 From: Keith Burzinski Date: Sun, 19 Oct 2025 23:42:54 -0500 Subject: [PATCH 190/336] [climate] First pass at some optimization (#11366) Co-authored-by: J. 
Nick Koston --- esphome/components/climate/climate.cpp | 256 ++++++++++++++----------- esphome/components/climate/climate.h | 91 ++++----- 2 files changed, 185 insertions(+), 162 deletions(-) diff --git a/esphome/components/climate/climate.cpp b/esphome/components/climate/climate.cpp index f3c93ed44e..24a3fe6d5a 100644 --- a/esphome/components/climate/climate.cpp +++ b/esphome/components/climate/climate.cpp @@ -6,6 +6,42 @@ namespace climate { static const char *const TAG = "climate"; +// Memory-efficient lookup tables +struct StringToUint8 { + const char *str; + const uint8_t value; +}; + +constexpr StringToUint8 CLIMATE_MODES_BY_STR[] = { + {"OFF", CLIMATE_MODE_OFF}, + {"AUTO", CLIMATE_MODE_AUTO}, + {"COOL", CLIMATE_MODE_COOL}, + {"HEAT", CLIMATE_MODE_HEAT}, + {"FAN_ONLY", CLIMATE_MODE_FAN_ONLY}, + {"DRY", CLIMATE_MODE_DRY}, + {"HEAT_COOL", CLIMATE_MODE_HEAT_COOL}, +}; + +constexpr StringToUint8 CLIMATE_FAN_MODES_BY_STR[] = { + {"ON", CLIMATE_FAN_ON}, {"OFF", CLIMATE_FAN_OFF}, {"AUTO", CLIMATE_FAN_AUTO}, + {"LOW", CLIMATE_FAN_LOW}, {"MEDIUM", CLIMATE_FAN_MEDIUM}, {"HIGH", CLIMATE_FAN_HIGH}, + {"MIDDLE", CLIMATE_FAN_MIDDLE}, {"FOCUS", CLIMATE_FAN_FOCUS}, {"DIFFUSE", CLIMATE_FAN_DIFFUSE}, + {"QUIET", CLIMATE_FAN_QUIET}, +}; + +constexpr StringToUint8 CLIMATE_PRESETS_BY_STR[] = { + {"ECO", CLIMATE_PRESET_ECO}, {"AWAY", CLIMATE_PRESET_AWAY}, {"BOOST", CLIMATE_PRESET_BOOST}, + {"COMFORT", CLIMATE_PRESET_COMFORT}, {"HOME", CLIMATE_PRESET_HOME}, {"SLEEP", CLIMATE_PRESET_SLEEP}, + {"ACTIVITY", CLIMATE_PRESET_ACTIVITY}, {"NONE", CLIMATE_PRESET_NONE}, +}; + +constexpr StringToUint8 CLIMATE_SWING_MODES_BY_STR[] = { + {"OFF", CLIMATE_SWING_OFF}, + {"BOTH", CLIMATE_SWING_BOTH}, + {"VERTICAL", CLIMATE_SWING_VERTICAL}, + {"HORIZONTAL", CLIMATE_SWING_HORIZONTAL}, +}; + void ClimateCall::perform() { this->parent_->control_callback_.call(*this); ESP_LOGD(TAG, "'%s' - Setting", this->parent_->get_name().c_str()); @@ -50,47 +86,46 @@ void ClimateCall::perform() { } this->parent_->control(*this); } + void ClimateCall::validate_() { auto traits = this->parent_->get_traits(); if (this->mode_.has_value()) { auto mode = *this->mode_; if (!traits.supports_mode(mode)) { - ESP_LOGW(TAG, " Mode %s is not supported by this device!", LOG_STR_ARG(climate_mode_to_string(mode))); + ESP_LOGW(TAG, " Mode %s not supported", LOG_STR_ARG(climate_mode_to_string(mode))); this->mode_.reset(); } } if (this->custom_fan_mode_.has_value()) { auto custom_fan_mode = *this->custom_fan_mode_; if (!traits.supports_custom_fan_mode(custom_fan_mode)) { - ESP_LOGW(TAG, " Fan Mode %s is not supported by this device!", custom_fan_mode.c_str()); + ESP_LOGW(TAG, " Fan Mode %s not supported", custom_fan_mode.c_str()); this->custom_fan_mode_.reset(); } } else if (this->fan_mode_.has_value()) { auto fan_mode = *this->fan_mode_; if (!traits.supports_fan_mode(fan_mode)) { - ESP_LOGW(TAG, " Fan Mode %s is not supported by this device!", - LOG_STR_ARG(climate_fan_mode_to_string(fan_mode))); + ESP_LOGW(TAG, " Fan Mode %s not supported", LOG_STR_ARG(climate_fan_mode_to_string(fan_mode))); this->fan_mode_.reset(); } } if (this->custom_preset_.has_value()) { auto custom_preset = *this->custom_preset_; if (!traits.supports_custom_preset(custom_preset)) { - ESP_LOGW(TAG, " Preset %s is not supported by this device!", custom_preset.c_str()); + ESP_LOGW(TAG, " Preset %s not supported", custom_preset.c_str()); this->custom_preset_.reset(); } } else if (this->preset_.has_value()) { auto preset = *this->preset_; if (!traits.supports_preset(preset)) { - 
ESP_LOGW(TAG, " Preset %s is not supported by this device!", LOG_STR_ARG(climate_preset_to_string(preset))); + ESP_LOGW(TAG, " Preset %s not supported", LOG_STR_ARG(climate_preset_to_string(preset))); this->preset_.reset(); } } if (this->swing_mode_.has_value()) { auto swing_mode = *this->swing_mode_; if (!traits.supports_swing_mode(swing_mode)) { - ESP_LOGW(TAG, " Swing Mode %s is not supported by this device!", - LOG_STR_ARG(climate_swing_mode_to_string(swing_mode))); + ESP_LOGW(TAG, " Swing Mode %s not supported", LOG_STR_ARG(climate_swing_mode_to_string(swing_mode))); this->swing_mode_.reset(); } } @@ -99,159 +134,127 @@ void ClimateCall::validate_() { if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) { ESP_LOGW(TAG, " Cannot set target temperature for climate device " - "with two-point target temperature!"); + "with two-point target temperature"); this->target_temperature_.reset(); } else if (std::isnan(target)) { - ESP_LOGW(TAG, " Target temperature must not be NAN!"); + ESP_LOGW(TAG, " Target temperature must not be NAN"); this->target_temperature_.reset(); } } if (this->target_temperature_low_.has_value() || this->target_temperature_high_.has_value()) { if (!traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) { - ESP_LOGW(TAG, " Cannot set low/high target temperature for this device!"); + ESP_LOGW(TAG, " Cannot set low/high target temperature"); this->target_temperature_low_.reset(); this->target_temperature_high_.reset(); } } if (this->target_temperature_low_.has_value() && std::isnan(*this->target_temperature_low_)) { - ESP_LOGW(TAG, " Target temperature low must not be NAN!"); + ESP_LOGW(TAG, " Target temperature low must not be NAN"); this->target_temperature_low_.reset(); } if (this->target_temperature_high_.has_value() && std::isnan(*this->target_temperature_high_)) { - ESP_LOGW(TAG, " Target temperature low must not be NAN!"); + ESP_LOGW(TAG, " Target temperature high must not be NAN"); this->target_temperature_high_.reset(); } if (this->target_temperature_low_.has_value() && this->target_temperature_high_.has_value()) { float low = *this->target_temperature_low_; float high = *this->target_temperature_high_; if (low > high) { - ESP_LOGW(TAG, " Target temperature low %.2f must be smaller than target temperature high %.2f!", low, high); + ESP_LOGW(TAG, " Target temperature low %.2f must be less than target temperature high %.2f", low, high); this->target_temperature_low_.reset(); this->target_temperature_high_.reset(); } } } + ClimateCall &ClimateCall::set_mode(ClimateMode mode) { this->mode_ = mode; return *this; } + ClimateCall &ClimateCall::set_mode(const std::string &mode) { - if (str_equals_case_insensitive(mode, "OFF")) { - this->set_mode(CLIMATE_MODE_OFF); - } else if (str_equals_case_insensitive(mode, "AUTO")) { - this->set_mode(CLIMATE_MODE_AUTO); - } else if (str_equals_case_insensitive(mode, "COOL")) { - this->set_mode(CLIMATE_MODE_COOL); - } else if (str_equals_case_insensitive(mode, "HEAT")) { - this->set_mode(CLIMATE_MODE_HEAT); - } else if (str_equals_case_insensitive(mode, "FAN_ONLY")) { - this->set_mode(CLIMATE_MODE_FAN_ONLY); - } else if (str_equals_case_insensitive(mode, "DRY")) { - this->set_mode(CLIMATE_MODE_DRY); - } else if (str_equals_case_insensitive(mode, "HEAT_COOL")) { - this->set_mode(CLIMATE_MODE_HEAT_COOL); - } else { - ESP_LOGW(TAG, "'%s' - Unrecognized mode %s", this->parent_->get_name().c_str(), 
mode.c_str()); + for (const auto &mode_entry : CLIMATE_MODES_BY_STR) { + if (str_equals_case_insensitive(mode, mode_entry.str)) { + this->set_mode(static_cast(mode_entry.value)); + return *this; + } } + ESP_LOGW(TAG, "'%s' - Unrecognized mode %s", this->parent_->get_name().c_str(), mode.c_str()); return *this; } + ClimateCall &ClimateCall::set_fan_mode(ClimateFanMode fan_mode) { this->fan_mode_ = fan_mode; this->custom_fan_mode_.reset(); return *this; } + ClimateCall &ClimateCall::set_fan_mode(const std::string &fan_mode) { - if (str_equals_case_insensitive(fan_mode, "ON")) { - this->set_fan_mode(CLIMATE_FAN_ON); - } else if (str_equals_case_insensitive(fan_mode, "OFF")) { - this->set_fan_mode(CLIMATE_FAN_OFF); - } else if (str_equals_case_insensitive(fan_mode, "AUTO")) { - this->set_fan_mode(CLIMATE_FAN_AUTO); - } else if (str_equals_case_insensitive(fan_mode, "LOW")) { - this->set_fan_mode(CLIMATE_FAN_LOW); - } else if (str_equals_case_insensitive(fan_mode, "MEDIUM")) { - this->set_fan_mode(CLIMATE_FAN_MEDIUM); - } else if (str_equals_case_insensitive(fan_mode, "HIGH")) { - this->set_fan_mode(CLIMATE_FAN_HIGH); - } else if (str_equals_case_insensitive(fan_mode, "MIDDLE")) { - this->set_fan_mode(CLIMATE_FAN_MIDDLE); - } else if (str_equals_case_insensitive(fan_mode, "FOCUS")) { - this->set_fan_mode(CLIMATE_FAN_FOCUS); - } else if (str_equals_case_insensitive(fan_mode, "DIFFUSE")) { - this->set_fan_mode(CLIMATE_FAN_DIFFUSE); - } else if (str_equals_case_insensitive(fan_mode, "QUIET")) { - this->set_fan_mode(CLIMATE_FAN_QUIET); - } else { - if (this->parent_->get_traits().supports_custom_fan_mode(fan_mode)) { - this->custom_fan_mode_ = fan_mode; - this->fan_mode_.reset(); - } else { - ESP_LOGW(TAG, "'%s' - Unrecognized fan mode %s", this->parent_->get_name().c_str(), fan_mode.c_str()); + for (const auto &mode_entry : CLIMATE_FAN_MODES_BY_STR) { + if (str_equals_case_insensitive(fan_mode, mode_entry.str)) { + this->set_fan_mode(static_cast(mode_entry.value)); + return *this; } } + if (this->parent_->get_traits().supports_custom_fan_mode(fan_mode)) { + this->custom_fan_mode_ = fan_mode; + this->fan_mode_.reset(); + } else { + ESP_LOGW(TAG, "'%s' - Unrecognized fan mode %s", this->parent_->get_name().c_str(), fan_mode.c_str()); + } return *this; } + ClimateCall &ClimateCall::set_fan_mode(optional fan_mode) { if (fan_mode.has_value()) { this->set_fan_mode(fan_mode.value()); } return *this; } + ClimateCall &ClimateCall::set_preset(ClimatePreset preset) { this->preset_ = preset; this->custom_preset_.reset(); return *this; } + ClimateCall &ClimateCall::set_preset(const std::string &preset) { - if (str_equals_case_insensitive(preset, "ECO")) { - this->set_preset(CLIMATE_PRESET_ECO); - } else if (str_equals_case_insensitive(preset, "AWAY")) { - this->set_preset(CLIMATE_PRESET_AWAY); - } else if (str_equals_case_insensitive(preset, "BOOST")) { - this->set_preset(CLIMATE_PRESET_BOOST); - } else if (str_equals_case_insensitive(preset, "COMFORT")) { - this->set_preset(CLIMATE_PRESET_COMFORT); - } else if (str_equals_case_insensitive(preset, "HOME")) { - this->set_preset(CLIMATE_PRESET_HOME); - } else if (str_equals_case_insensitive(preset, "SLEEP")) { - this->set_preset(CLIMATE_PRESET_SLEEP); - } else if (str_equals_case_insensitive(preset, "ACTIVITY")) { - this->set_preset(CLIMATE_PRESET_ACTIVITY); - } else if (str_equals_case_insensitive(preset, "NONE")) { - this->set_preset(CLIMATE_PRESET_NONE); - } else { - if (this->parent_->get_traits().supports_custom_preset(preset)) { - this->custom_preset_ = 
preset; - this->preset_.reset(); - } else { - ESP_LOGW(TAG, "'%s' - Unrecognized preset %s", this->parent_->get_name().c_str(), preset.c_str()); + for (const auto &preset_entry : CLIMATE_PRESETS_BY_STR) { + if (str_equals_case_insensitive(preset, preset_entry.str)) { + this->set_preset(static_cast(preset_entry.value)); + return *this; } } + if (this->parent_->get_traits().supports_custom_preset(preset)) { + this->custom_preset_ = preset; + this->preset_.reset(); + } else { + ESP_LOGW(TAG, "'%s' - Unrecognized preset %s", this->parent_->get_name().c_str(), preset.c_str()); + } return *this; } + ClimateCall &ClimateCall::set_preset(optional preset) { if (preset.has_value()) { this->set_preset(preset.value()); } return *this; } + ClimateCall &ClimateCall::set_swing_mode(ClimateSwingMode swing_mode) { this->swing_mode_ = swing_mode; return *this; } + ClimateCall &ClimateCall::set_swing_mode(const std::string &swing_mode) { - if (str_equals_case_insensitive(swing_mode, "OFF")) { - this->set_swing_mode(CLIMATE_SWING_OFF); - } else if (str_equals_case_insensitive(swing_mode, "BOTH")) { - this->set_swing_mode(CLIMATE_SWING_BOTH); - } else if (str_equals_case_insensitive(swing_mode, "VERTICAL")) { - this->set_swing_mode(CLIMATE_SWING_VERTICAL); - } else if (str_equals_case_insensitive(swing_mode, "HORIZONTAL")) { - this->set_swing_mode(CLIMATE_SWING_HORIZONTAL); - } else { - ESP_LOGW(TAG, "'%s' - Unrecognized swing mode %s", this->parent_->get_name().c_str(), swing_mode.c_str()); + for (const auto &mode_entry : CLIMATE_SWING_MODES_BY_STR) { + if (str_equals_case_insensitive(swing_mode, mode_entry.str)) { + this->set_swing_mode(static_cast(mode_entry.value)); + return *this; + } } + ESP_LOGW(TAG, "'%s' - Unrecognized swing mode %s", this->parent_->get_name().c_str(), swing_mode.c_str()); return *this; } @@ -259,59 +262,71 @@ ClimateCall &ClimateCall::set_target_temperature(float target_temperature) { this->target_temperature_ = target_temperature; return *this; } + ClimateCall &ClimateCall::set_target_temperature_low(float target_temperature_low) { this->target_temperature_low_ = target_temperature_low; return *this; } + ClimateCall &ClimateCall::set_target_temperature_high(float target_temperature_high) { this->target_temperature_high_ = target_temperature_high; return *this; } + ClimateCall &ClimateCall::set_target_humidity(float target_humidity) { this->target_humidity_ = target_humidity; return *this; } -const optional &ClimateCall::get_mode() const { return this->mode_; } const optional &ClimateCall::get_target_temperature() const { return this->target_temperature_; } const optional &ClimateCall::get_target_temperature_low() const { return this->target_temperature_low_; } const optional &ClimateCall::get_target_temperature_high() const { return this->target_temperature_high_; } const optional &ClimateCall::get_target_humidity() const { return this->target_humidity_; } + +const optional &ClimateCall::get_mode() const { return this->mode_; } const optional &ClimateCall::get_fan_mode() const { return this->fan_mode_; } -const optional &ClimateCall::get_custom_fan_mode() const { return this->custom_fan_mode_; } -const optional &ClimateCall::get_preset() const { return this->preset_; } -const optional &ClimateCall::get_custom_preset() const { return this->custom_preset_; } const optional &ClimateCall::get_swing_mode() const { return this->swing_mode_; } +const optional &ClimateCall::get_preset() const { return this->preset_; } +const optional &ClimateCall::get_custom_fan_mode() const { return 
this->custom_fan_mode_; } +const optional &ClimateCall::get_custom_preset() const { return this->custom_preset_; } + ClimateCall &ClimateCall::set_target_temperature_high(optional target_temperature_high) { this->target_temperature_high_ = target_temperature_high; return *this; } + ClimateCall &ClimateCall::set_target_temperature_low(optional target_temperature_low) { this->target_temperature_low_ = target_temperature_low; return *this; } + ClimateCall &ClimateCall::set_target_temperature(optional target_temperature) { this->target_temperature_ = target_temperature; return *this; } + ClimateCall &ClimateCall::set_target_humidity(optional target_humidity) { this->target_humidity_ = target_humidity; return *this; } + ClimateCall &ClimateCall::set_mode(optional mode) { this->mode_ = mode; return *this; } + ClimateCall &ClimateCall::set_fan_mode(optional fan_mode) { this->fan_mode_ = fan_mode; this->custom_fan_mode_.reset(); return *this; } + ClimateCall &ClimateCall::set_preset(optional preset) { this->preset_ = preset; this->custom_preset_.reset(); return *this; } + ClimateCall &ClimateCall::set_swing_mode(optional swing_mode) { this->swing_mode_ = swing_mode; return *this; @@ -336,6 +351,7 @@ optional Climate::restore_state_() { return {}; return recovered; } + void Climate::save_state_() { #if (defined(USE_ESP_IDF) || (defined(USE_ESP8266) && USE_ARDUINO_VERSION_CODE >= VERSION_CODE(3, 0, 0))) && \ !defined(CLANG_TIDY) @@ -398,6 +414,7 @@ void Climate::save_state_() { this->rtc_.save(&state); } + void Climate::publish_state() { ESP_LOGD(TAG, "'%s' - Sending state:", this->name_.c_str()); auto traits = this->get_traits(); @@ -469,16 +486,20 @@ ClimateTraits Climate::get_traits() { void Climate::set_visual_min_temperature_override(float visual_min_temperature_override) { this->visual_min_temperature_override_ = visual_min_temperature_override; } + void Climate::set_visual_max_temperature_override(float visual_max_temperature_override) { this->visual_max_temperature_override_ = visual_max_temperature_override; } + void Climate::set_visual_temperature_step_override(float target, float current) { this->visual_target_temperature_step_override_ = target; this->visual_current_temperature_step_override_ = current; } + void Climate::set_visual_min_humidity_override(float visual_min_humidity_override) { this->visual_min_humidity_override_ = visual_min_humidity_override; } + void Climate::set_visual_max_humidity_override(float visual_max_humidity_override) { this->visual_max_humidity_override_ = visual_max_humidity_override; } @@ -510,6 +531,7 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) { } return call; } + void ClimateDeviceRestoreState::apply(Climate *climate) { auto traits = climate->get_traits(); climate->mode = this->mode; @@ -579,68 +601,68 @@ void Climate::dump_traits_(const char *tag) { auto traits = this->get_traits(); ESP_LOGCONFIG(tag, "ClimateTraits:"); ESP_LOGCONFIG(tag, - " [x] Visual settings:\n" - " - Min temperature: %.1f\n" - " - Max temperature: %.1f\n" - " - Temperature step:\n" - " Target: %.1f", + " Visual settings:\n" + " - Min temperature: %.1f\n" + " - Max temperature: %.1f\n" + " - Temperature step:\n" + " Target: %.1f", traits.get_visual_min_temperature(), traits.get_visual_max_temperature(), traits.get_visual_target_temperature_step()); if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) { - ESP_LOGCONFIG(tag, " Current: %.1f", traits.get_visual_current_temperature_step()); + ESP_LOGCONFIG(tag, " Current: %.1f", 
traits.get_visual_current_temperature_step()); } if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY | climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) { ESP_LOGCONFIG(tag, - " - Min humidity: %.0f\n" - " - Max humidity: %.0f", + " - Min humidity: %.0f\n" + " - Max humidity: %.0f", traits.get_visual_min_humidity(), traits.get_visual_max_humidity()); } if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) { - ESP_LOGCONFIG(tag, " [x] Supports two-point target temperature"); + ESP_LOGCONFIG(tag, " Supports two-point target temperature"); } if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) { - ESP_LOGCONFIG(tag, " [x] Supports current temperature"); + ESP_LOGCONFIG(tag, " Supports current temperature"); } if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) { - ESP_LOGCONFIG(tag, " [x] Supports target humidity"); + ESP_LOGCONFIG(tag, " Supports target humidity"); } if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) { - ESP_LOGCONFIG(tag, " [x] Supports current humidity"); + ESP_LOGCONFIG(tag, " Supports current humidity"); } if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) { - ESP_LOGCONFIG(tag, " [x] Supports action"); + ESP_LOGCONFIG(tag, " Supports action"); } if (!traits.get_supported_modes().empty()) { - ESP_LOGCONFIG(tag, " [x] Supported modes:"); + ESP_LOGCONFIG(tag, " Supported modes:"); for (ClimateMode m : traits.get_supported_modes()) - ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_mode_to_string(m))); + ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_mode_to_string(m))); } if (!traits.get_supported_fan_modes().empty()) { - ESP_LOGCONFIG(tag, " [x] Supported fan modes:"); + ESP_LOGCONFIG(tag, " Supported fan modes:"); for (ClimateFanMode m : traits.get_supported_fan_modes()) - ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_fan_mode_to_string(m))); + ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_fan_mode_to_string(m))); } if (!traits.get_supported_custom_fan_modes().empty()) { - ESP_LOGCONFIG(tag, " [x] Supported custom fan modes:"); + ESP_LOGCONFIG(tag, " Supported custom fan modes:"); for (const std::string &s : traits.get_supported_custom_fan_modes()) - ESP_LOGCONFIG(tag, " - %s", s.c_str()); + ESP_LOGCONFIG(tag, " - %s", s.c_str()); } if (!traits.get_supported_presets().empty()) { - ESP_LOGCONFIG(tag, " [x] Supported presets:"); + ESP_LOGCONFIG(tag, " Supported presets:"); for (ClimatePreset p : traits.get_supported_presets()) - ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_preset_to_string(p))); + ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_preset_to_string(p))); } if (!traits.get_supported_custom_presets().empty()) { - ESP_LOGCONFIG(tag, " [x] Supported custom presets:"); + ESP_LOGCONFIG(tag, " Supported custom presets:"); for (const std::string &s : traits.get_supported_custom_presets()) - ESP_LOGCONFIG(tag, " - %s", s.c_str()); + ESP_LOGCONFIG(tag, " - %s", s.c_str()); } if (!traits.get_supported_swing_modes().empty()) { - ESP_LOGCONFIG(tag, " [x] Supported swing modes:"); + ESP_LOGCONFIG(tag, " Supported swing modes:"); for (ClimateSwingMode m : traits.get_supported_swing_modes()) - ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_swing_mode_to_string(m))); + ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_swing_mode_to_string(m))); } } diff --git a/esphome/components/climate/climate.h b/esphome/components/climate/climate.h index b31a2eedf6..495464c6a2 100644 --- 
a/esphome/components/climate/climate.h +++ b/esphome/components/climate/climate.h @@ -93,30 +93,31 @@ class ClimateCall { void perform(); - const optional &get_mode() const; const optional &get_target_temperature() const; const optional &get_target_temperature_low() const; const optional &get_target_temperature_high() const; const optional &get_target_humidity() const; + + const optional &get_mode() const; const optional &get_fan_mode() const; const optional &get_swing_mode() const; - const optional &get_custom_fan_mode() const; const optional &get_preset() const; + const optional &get_custom_fan_mode() const; const optional &get_custom_preset() const; protected: void validate_(); Climate *const parent_; - optional mode_; optional target_temperature_; optional target_temperature_low_; optional target_temperature_high_; optional target_humidity_; + optional mode_; optional fan_mode_; optional swing_mode_; - optional custom_fan_mode_; optional preset_; + optional custom_fan_mode_; optional custom_preset_; }; @@ -169,47 +170,6 @@ class Climate : public EntityBase { public: Climate() {} - /// The active mode of the climate device. - ClimateMode mode{CLIMATE_MODE_OFF}; - - /// The active state of the climate device. - ClimateAction action{CLIMATE_ACTION_OFF}; - - /// The current temperature of the climate device, as reported from the integration. - float current_temperature{NAN}; - - /// The current humidity of the climate device, as reported from the integration. - float current_humidity{NAN}; - - union { - /// The target temperature of the climate device. - float target_temperature; - struct { - /// The minimum target temperature of the climate device, for climate devices with split target temperature. - float target_temperature_low{NAN}; - /// The maximum target temperature of the climate device, for climate devices with split target temperature. - float target_temperature_high{NAN}; - }; - }; - - /// The target humidity of the climate device. - float target_humidity; - - /// The active fan mode of the climate device. - optional fan_mode; - - /// The active swing mode of the climate device. - ClimateSwingMode swing_mode; - - /// The active custom fan mode of the climate device. - optional custom_fan_mode; - - /// The active preset of the climate device. - optional preset; - - /// The active custom preset mode of the climate device. - optional custom_preset; - /** Add a callback for the climate device state, each time the state of the climate device is updated * (using publish_state), this callback will be called. * @@ -251,6 +211,47 @@ class Climate : public EntityBase { void set_visual_min_humidity_override(float visual_min_humidity_override); void set_visual_max_humidity_override(float visual_max_humidity_override); + /// The current temperature of the climate device, as reported from the integration. + float current_temperature{NAN}; + + /// The current humidity of the climate device, as reported from the integration. + float current_humidity{NAN}; + + union { + /// The target temperature of the climate device. + float target_temperature; + struct { + /// The minimum target temperature of the climate device, for climate devices with split target temperature. + float target_temperature_low{NAN}; + /// The maximum target temperature of the climate device, for climate devices with split target temperature. + float target_temperature_high{NAN}; + }; + }; + + /// The target humidity of the climate device. + float target_humidity; + + /// The active fan mode of the climate device. 
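
The ClimateCall refactor above replaces the long if/else-if chains with shared string-to-enum lookup tables (CLIMATE_MODES_BY_STR, CLIMATE_FAN_MODES_BY_STR, CLIMATE_PRESETS_BY_STR, CLIMATE_SWING_MODES_BY_STR). The table definitions are not part of these hunks, so the following is only a minimal standalone sketch of the assumed pattern; the entry layout, names and case-insensitive helper below are illustrative stand-ins, not the component's actual definitions.

// Sketch only: real tables and str_equals_case_insensitive() live elsewhere in the component.
#include <cctype>
#include <cstdint>
#include <cstdio>

enum ClimateMode : uint8_t { CLIMATE_MODE_OFF = 0, CLIMATE_MODE_HEAT_COOL, CLIMATE_MODE_COOL, CLIMATE_MODE_HEAT };

struct ModeEntry {
  const char *str;
  uint8_t value;
};

// One table replaces the previous if/else-if chain.
static constexpr ModeEntry MODES_BY_STR[] = {
    {"OFF", CLIMATE_MODE_OFF},
    {"HEAT_COOL", CLIMATE_MODE_HEAT_COOL},
    {"COOL", CLIMATE_MODE_COOL},
    {"HEAT", CLIMATE_MODE_HEAT},
};

static bool equals_case_insensitive(const char *a, const char *b) {
  for (; *a && *b; ++a, ++b) {
    if (std::toupper(static_cast<unsigned char>(*a)) != std::toupper(static_cast<unsigned char>(*b)))
      return false;
  }
  return *a == *b;
}

// Returns true and writes the parsed mode on a match, false otherwise.
static bool parse_mode(const char *input, ClimateMode &out) {
  for (const auto &entry : MODES_BY_STR) {
    if (equals_case_insensitive(input, entry.str)) {
      out = static_cast<ClimateMode>(entry.value);
      return true;
    }
  }
  return false;
}

int main() {
  ClimateMode m;
  if (parse_mode("heat_cool", m))
    std::printf("parsed mode %d\n", static_cast<int>(m));
  return 0;
}

On a miss the component falls back to custom fan modes/presets where the traits allow it, otherwise it logs the "Unrecognized ..." warning, as the hunks above show.
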
+ optional fan_mode; + + /// The active preset of the climate device. + optional preset; + + /// The active custom fan mode of the climate device. + optional custom_fan_mode; + + /// The active custom preset mode of the climate device. + optional custom_preset; + + /// The active mode of the climate device. + ClimateMode mode{CLIMATE_MODE_OFF}; + + /// The active state of the climate device. + ClimateAction action{CLIMATE_ACTION_OFF}; + + /// The active swing mode of the climate device. + ClimateSwingMode swing_mode{CLIMATE_SWING_OFF}; + protected: friend ClimateCall; From 3d82c5baf7402de97a69fa7a0ec8208382f768f1 Mon Sep 17 00:00:00 2001 From: Aman kumar Date: Mon, 20 Oct 2025 11:40:38 +0530 Subject: [PATCH 191/336] [esp32_improv]: add next_url support for WiFi provisioning (#10757) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com> --- esphome/components/esp32_improv/__init__.py | 114 ++++++++++-------- .../esp32_improv/esp32_improv_component.cpp | 13 +- .../esp32_improv/esp32_improv_component.h | 3 +- .../components/improv_base/improv_base.cpp | 41 ++++--- tests/components/esp32_improv/common.yaml | 1 + 5 files changed, 101 insertions(+), 71 deletions(-) diff --git a/esphome/components/esp32_improv/__init__.py b/esphome/components/esp32_improv/__init__.py index fa33bd947a..a55c819e6f 100644 --- a/esphome/components/esp32_improv/__init__.py +++ b/esphome/components/esp32_improv/__init__.py @@ -1,11 +1,11 @@ from esphome import automation import esphome.codegen as cg -from esphome.components import binary_sensor, esp32_ble, output +from esphome.components import binary_sensor, esp32_ble, improv_base, output from esphome.components.esp32_ble import BTLoggers import esphome.config_validation as cv from esphome.const import CONF_ID, CONF_ON_STATE, CONF_TRIGGER_ID -AUTO_LOAD = ["esp32_ble_server"] +AUTO_LOAD = ["esp32_ble_server", "improv_base"] CODEOWNERS = ["@jesserockz"] DEPENDENCIES = ["wifi", "esp32"] @@ -20,6 +20,7 @@ CONF_ON_STOP = "on_stop" CONF_STATUS_INDICATOR = "status_indicator" CONF_WIFI_TIMEOUT = "wifi_timeout" + improv_ns = cg.esphome_ns.namespace("improv") Error = improv_ns.enum("Error") State = improv_ns.enum("State") @@ -43,55 +44,63 @@ ESP32ImprovStoppedTrigger = esp32_improv_ns.class_( ) -CONFIG_SCHEMA = cv.Schema( - { - cv.GenerateID(): cv.declare_id(ESP32ImprovComponent), - cv.Required(CONF_AUTHORIZER): cv.Any( - cv.none, cv.use_id(binary_sensor.BinarySensor) - ), - cv.Optional(CONF_STATUS_INDICATOR): cv.use_id(output.BinaryOutput), - cv.Optional( - CONF_IDENTIFY_DURATION, default="10s" - ): cv.positive_time_period_milliseconds, - cv.Optional( - CONF_AUTHORIZED_DURATION, default="1min" - ): cv.positive_time_period_milliseconds, - cv.Optional( - CONF_WIFI_TIMEOUT, default="1min" - ): cv.positive_time_period_milliseconds, - cv.Optional(CONF_ON_PROVISIONED): automation.validate_automation( - { - cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( - ESP32ImprovProvisionedTrigger - ), - } - ), - cv.Optional(CONF_ON_PROVISIONING): automation.validate_automation( - { - cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( - ESP32ImprovProvisioningTrigger - ), - } - ), - cv.Optional(CONF_ON_START): automation.validate_automation( - { - cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(ESP32ImprovStartTrigger), - } - ), - cv.Optional(CONF_ON_STATE): automation.validate_automation( - { - cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(ESP32ImprovStateTrigger), - } - ), - cv.Optional(CONF_ON_STOP): 
automation.validate_automation( - { - cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( - ESP32ImprovStoppedTrigger - ), - } - ), - } -).extend(cv.COMPONENT_SCHEMA) +CONFIG_SCHEMA = ( + cv.Schema( + { + cv.GenerateID(): cv.declare_id(ESP32ImprovComponent), + cv.Required(CONF_AUTHORIZER): cv.Any( + cv.none, cv.use_id(binary_sensor.BinarySensor) + ), + cv.Optional(CONF_STATUS_INDICATOR): cv.use_id(output.BinaryOutput), + cv.Optional( + CONF_IDENTIFY_DURATION, default="10s" + ): cv.positive_time_period_milliseconds, + cv.Optional( + CONF_AUTHORIZED_DURATION, default="1min" + ): cv.positive_time_period_milliseconds, + cv.Optional( + CONF_WIFI_TIMEOUT, default="1min" + ): cv.positive_time_period_milliseconds, + cv.Optional(CONF_ON_PROVISIONED): automation.validate_automation( + { + cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( + ESP32ImprovProvisionedTrigger + ), + } + ), + cv.Optional(CONF_ON_PROVISIONING): automation.validate_automation( + { + cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( + ESP32ImprovProvisioningTrigger + ), + } + ), + cv.Optional(CONF_ON_START): automation.validate_automation( + { + cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( + ESP32ImprovStartTrigger + ), + } + ), + cv.Optional(CONF_ON_STATE): automation.validate_automation( + { + cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( + ESP32ImprovStateTrigger + ), + } + ), + cv.Optional(CONF_ON_STOP): automation.validate_automation( + { + cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( + ESP32ImprovStoppedTrigger + ), + } + ), + } + ) + .extend(improv_base.IMPROV_SCHEMA) + .extend(cv.COMPONENT_SCHEMA) +) async def to_code(config): @@ -102,7 +111,8 @@ async def to_code(config): await cg.register_component(var, config) cg.add_define("USE_IMPROV") - cg.add_library("improv/Improv", "1.2.4") + + await improv_base.setup_improv_core(var, config) cg.add(var.set_identify_duration(config[CONF_IDENTIFY_DURATION])) cg.add(var.set_authorized_duration(config[CONF_AUTHORIZED_DURATION])) diff --git a/esphome/components/esp32_improv/esp32_improv_component.cpp b/esphome/components/esp32_improv/esp32_improv_component.cpp index d83caf931b..526f7f4b42 100644 --- a/esphome/components/esp32_improv/esp32_improv_component.cpp +++ b/esphome/components/esp32_improv/esp32_improv_component.cpp @@ -1,10 +1,10 @@ #include "esp32_improv_component.h" +#include "esphome/components/bytebuffer/bytebuffer.h" #include "esphome/components/esp32_ble/ble.h" #include "esphome/components/esp32_ble_server/ble_2902.h" #include "esphome/core/application.h" #include "esphome/core/log.h" -#include "esphome/components/bytebuffer/bytebuffer.h" #ifdef USE_ESP32 @@ -384,7 +384,16 @@ void ESP32ImprovComponent::check_wifi_connection_() { this->connecting_sta_ = {}; this->cancel_timeout("wifi-connect-timeout"); - std::vector urls = {ESPHOME_MY_LINK}; + std::vector urls; + + // Add next_url if configured (should be first per Improv BLE spec) + std::string next_url = this->get_formatted_next_url_(); + if (!next_url.empty()) { + urls.push_back(next_url); + } + + // Add default URLs for backward compatibility + urls.emplace_back(ESPHOME_MY_LINK); #ifdef USE_WEBSERVER for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) { if (ip.is_ip4()) { diff --git a/esphome/components/esp32_improv/esp32_improv_component.h b/esphome/components/esp32_improv/esp32_improv_component.h index 6782430ffe..fd3b2b861d 100644 --- a/esphome/components/esp32_improv/esp32_improv_component.h +++ b/esphome/components/esp32_improv/esp32_improv_component.h @@ -7,6 +7,7 @@ #include 
"esphome/components/esp32_ble_server/ble_characteristic.h" #include "esphome/components/esp32_ble_server/ble_server.h" +#include "esphome/components/improv_base/improv_base.h" #include "esphome/components/wifi/wifi_component.h" #ifdef USE_ESP32_IMPROV_STATE_CALLBACK @@ -32,7 +33,7 @@ namespace esp32_improv { using namespace esp32_ble_server; -class ESP32ImprovComponent : public Component { +class ESP32ImprovComponent : public Component, public improv_base::ImprovBase { public: ESP32ImprovComponent(); void dump_config() override; diff --git a/esphome/components/improv_base/improv_base.cpp b/esphome/components/improv_base/improv_base.cpp index e890187d1a..89ee5492b5 100644 --- a/esphome/components/improv_base/improv_base.cpp +++ b/esphome/components/improv_base/improv_base.cpp @@ -10,27 +10,36 @@ std::string ImprovBase::get_formatted_next_url_() { if (this->next_url_.empty()) { return ""; } - std::string copy = this->next_url_; - // Device name - std::size_t pos = this->next_url_.find("{{device_name}}"); - if (pos != std::string::npos) { - const std::string &device_name = App.get_name(); - copy.replace(pos, 15, device_name); + + std::string formatted_url = this->next_url_; + + // Replace all occurrences of {{device_name}} + const std::string device_name_placeholder = "{{device_name}}"; + const std::string &device_name = App.get_name(); + size_t pos = 0; + while ((pos = formatted_url.find(device_name_placeholder, pos)) != std::string::npos) { + formatted_url.replace(pos, device_name_placeholder.length(), device_name); + pos += device_name.length(); } - // Ip address - pos = this->next_url_.find("{{ip_address}}"); - if (pos != std::string::npos) { - for (auto &ip : network::get_ip_addresses()) { - if (ip.is_ip4()) { - std::string ipa = ip.str(); - copy.replace(pos, 14, ipa); - break; - } + // Replace all occurrences of {{ip_address}} + const std::string ip_address_placeholder = "{{ip_address}}"; + std::string ip_address_str; + for (auto &ip : network::get_ip_addresses()) { + if (ip.is_ip4()) { + ip_address_str = ip.str(); + break; } } + pos = 0; + while ((pos = formatted_url.find(ip_address_placeholder, pos)) != std::string::npos) { + formatted_url.replace(pos, ip_address_placeholder.length(), ip_address_str); + pos += ip_address_str.length(); + } - return copy; + // Note: {{esphome_version}} is replaced at code generation time in Python + + return formatted_url; } } // namespace improv_base diff --git a/tests/components/esp32_improv/common.yaml b/tests/components/esp32_improv/common.yaml index 7eb3f9c0be..7dc2f7b6c7 100644 --- a/tests/components/esp32_improv/common.yaml +++ b/tests/components/esp32_improv/common.yaml @@ -16,3 +16,4 @@ esp32_improv: authorizer: io0_button authorized_duration: 1min status_indicator: built_in_led + next_url: "https://example.com/setup?device={{device_name}}&ip={{ip_address}}&version={{esphome_version}}" From 12e9c5e60eb9260a0c27e13a705b954629ea7535 Mon Sep 17 00:00:00 2001 From: Enrico Galli Date: Sun, 19 Oct 2025 23:11:09 -0700 Subject: [PATCH 192/336] [epaper_spi] Fix busy pin logic (#11349) Co-authored-by: J. 
Nick Koston --- esphome/components/epaper_spi/epaper_spi.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/epaper_spi/epaper_spi.cpp b/esphome/components/epaper_spi/epaper_spi.cpp index 21be4a2c05..9630ea7f8b 100644 --- a/esphome/components/epaper_spi/epaper_spi.cpp +++ b/esphome/components/epaper_spi/epaper_spi.cpp @@ -103,7 +103,7 @@ bool EPaperBase::is_idle_() { if (this->busy_pin_ == nullptr) { return true; } - return !this->busy_pin_->digital_read(); + return this->busy_pin_->digital_read(); } void EPaperBase::reset() { From ea4e5fd7bd2411034b58a20bf6a5c0d4390374ab Mon Sep 17 00:00:00 2001 From: Juan Antonio Aldea Date: Mon, 20 Oct 2025 10:20:39 +0200 Subject: [PATCH 193/336] [climate] Migrate components to the new API (#11369) Co-authored-by: J. Nick Koston Co-authored-by: Keith Burzinski --- esphome/components/anova/anova.h | 2 +- .../components/bang_bang/bang_bang_climate.cpp | 15 ++++++++------- .../components/bedjet/climate/bedjet_climate.h | 3 +-- esphome/components/climate_ir/climate_ir.cpp | 6 ++++-- esphome/components/daikin_arc/daikin_arc.cpp | 4 +--- esphome/components/demo/demo_climate.h | 10 ++++------ esphome/components/haier/haier_base.cpp | 2 +- esphome/components/midea/air_conditioner.cpp | 2 +- esphome/components/mitsubishi/mitsubishi.cpp | 5 +++-- esphome/components/pid/pid_climate.cpp | 6 ++---- esphome/components/tuya/climate/tuya_climate.cpp | 7 +++++-- .../climate/uponor_smatrix_climate.cpp | 5 ++--- esphome/components/yashima/yashima.cpp | 5 +++-- 13 files changed, 36 insertions(+), 36 deletions(-) diff --git a/esphome/components/anova/anova.h b/esphome/components/anova/anova.h index 560d96baa7..2e43ebfb98 100644 --- a/esphome/components/anova/anova.h +++ b/esphome/components/anova/anova.h @@ -28,7 +28,7 @@ class Anova : public climate::Climate, public esphome::ble_client::BLEClientNode void dump_config() override; climate::ClimateTraits traits() override { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); traits.set_supported_modes({climate::CLIMATE_MODE_OFF, climate::ClimateMode::CLIMATE_MODE_HEAT}); traits.set_visual_min_temperature(25.0); traits.set_visual_max_temperature(100.0); diff --git a/esphome/components/bang_bang/bang_bang_climate.cpp b/esphome/components/bang_bang/bang_bang_climate.cpp index bb85b49238..5d8c0eb7b7 100644 --- a/esphome/components/bang_bang/bang_bang_climate.cpp +++ b/esphome/components/bang_bang/bang_bang_climate.cpp @@ -56,26 +56,27 @@ void BangBangClimate::control(const climate::ClimateCall &call) { } climate::ClimateTraits BangBangClimate::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | + climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE | climate::CLIMATE_SUPPORTS_ACTION); + if (this->humidity_sensor_ != nullptr) - traits.set_supports_current_humidity(true); - traits.set_supported_modes({ - climate::CLIMATE_MODE_OFF, - }); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY); + + traits.set_supported_modes({climate::CLIMATE_MODE_OFF}); if (supports_cool_) traits.add_supported_mode(climate::CLIMATE_MODE_COOL); if (supports_heat_) traits.add_supported_mode(climate::CLIMATE_MODE_HEAT); if (supports_cool_ && supports_heat_) traits.add_supported_mode(climate::CLIMATE_MODE_HEAT_COOL); - 
traits.set_supports_two_point_target_temperature(true); + if (supports_away_) { traits.set_supported_presets({ climate::CLIMATE_PRESET_HOME, climate::CLIMATE_PRESET_AWAY, }); } - traits.set_supports_action(true); + return traits; } void BangBangClimate::compute_state_() { diff --git a/esphome/components/bedjet/climate/bedjet_climate.h b/esphome/components/bedjet/climate/bedjet_climate.h index 7eaa735a3f..963f2e585a 100644 --- a/esphome/components/bedjet/climate/bedjet_climate.h +++ b/esphome/components/bedjet/climate/bedjet_climate.h @@ -33,8 +33,7 @@ class BedJetClimate : public climate::Climate, public BedJetClient, public Polli climate::ClimateTraits traits() override { auto traits = climate::ClimateTraits(); - traits.set_supports_action(true); - traits.set_supports_current_temperature(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION | climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); traits.set_supported_modes({ climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT, diff --git a/esphome/components/climate_ir/climate_ir.cpp b/esphome/components/climate_ir/climate_ir.cpp index dc8117f6ae..2b95792a6c 100644 --- a/esphome/components/climate_ir/climate_ir.cpp +++ b/esphome/components/climate_ir/climate_ir.cpp @@ -8,7 +8,10 @@ static const char *const TAG = "climate_ir"; climate::ClimateTraits ClimateIR::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(this->sensor_ != nullptr); + if (this->sensor_ != nullptr) { + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); + } + traits.set_supported_modes({climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT_COOL}); if (this->supports_cool_) traits.add_supported_mode(climate::CLIMATE_MODE_COOL); @@ -19,7 +22,6 @@ climate::ClimateTraits ClimateIR::traits() { if (this->supports_fan_only_) traits.add_supported_mode(climate::CLIMATE_MODE_FAN_ONLY); - traits.set_supports_two_point_target_temperature(false); traits.set_visual_min_temperature(this->minimum_temperature_); traits.set_visual_max_temperature(this->maximum_temperature_); traits.set_visual_temperature_step(this->temperature_step_); diff --git a/esphome/components/daikin_arc/daikin_arc.cpp b/esphome/components/daikin_arc/daikin_arc.cpp index 068819ecd1..f05342f482 100644 --- a/esphome/components/daikin_arc/daikin_arc.cpp +++ b/esphome/components/daikin_arc/daikin_arc.cpp @@ -241,9 +241,7 @@ uint8_t DaikinArcClimate::humidity_() { climate::ClimateTraits DaikinArcClimate::traits() { climate::ClimateTraits traits = climate_ir::ClimateIR::traits(); - traits.set_supports_current_temperature(true); - traits.set_supports_current_humidity(false); - traits.set_supports_target_humidity(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY); traits.set_visual_min_humidity(38); traits.set_visual_max_humidity(52); return traits; diff --git a/esphome/components/demo/demo_climate.h b/esphome/components/demo/demo_climate.h index 1ba80aabf5..84b16e7ec5 100644 --- a/esphome/components/demo/demo_climate.h +++ b/esphome/components/demo/demo_climate.h @@ -82,16 +82,14 @@ class DemoClimate : public climate::Climate, public Component { climate::ClimateTraits traits{}; switch (type_) { case DemoClimateType::TYPE_1: - traits.set_supports_current_temperature(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_SUPPORTS_ACTION); traits.set_supported_modes({ climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT, }); - 
traits.set_supports_action(true); traits.set_visual_temperature_step(0.5); break; case DemoClimateType::TYPE_2: - traits.set_supports_current_temperature(false); traits.set_supported_modes({ climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT, @@ -100,7 +98,7 @@ class DemoClimate : public climate::Climate, public Component { climate::CLIMATE_MODE_DRY, climate::CLIMATE_MODE_FAN_ONLY, }); - traits.set_supports_action(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION); traits.set_supported_fan_modes({ climate::CLIMATE_FAN_ON, climate::CLIMATE_FAN_OFF, @@ -123,8 +121,8 @@ class DemoClimate : public climate::Climate, public Component { traits.set_supported_custom_presets({"My Preset"}); break; case DemoClimateType::TYPE_3: - traits.set_supports_current_temperature(true); - traits.set_supports_two_point_target_temperature(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | + climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE); traits.set_supported_modes({ climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_COOL, diff --git a/esphome/components/haier/haier_base.cpp b/esphome/components/haier/haier_base.cpp index 55a2454fca..5709b8e9b5 100644 --- a/esphome/components/haier/haier_base.cpp +++ b/esphome/components/haier/haier_base.cpp @@ -65,7 +65,7 @@ HaierClimateBase::HaierClimateBase() {climate::CLIMATE_FAN_AUTO, climate::CLIMATE_FAN_LOW, climate::CLIMATE_FAN_MEDIUM, climate::CLIMATE_FAN_HIGH}); this->traits_.set_supported_swing_modes({climate::CLIMATE_SWING_OFF, climate::CLIMATE_SWING_BOTH, climate::CLIMATE_SWING_VERTICAL, climate::CLIMATE_SWING_HORIZONTAL}); - this->traits_.set_supports_current_temperature(true); + this->traits_.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); } HaierClimateBase::~HaierClimateBase() {} diff --git a/esphome/components/midea/air_conditioner.cpp b/esphome/components/midea/air_conditioner.cpp index 170a2f6a40..0ad26ebd51 100644 --- a/esphome/components/midea/air_conditioner.cpp +++ b/esphome/components/midea/air_conditioner.cpp @@ -77,7 +77,7 @@ void AirConditioner::control(const ClimateCall &call) { ClimateTraits AirConditioner::traits() { auto traits = ClimateTraits(); - traits.set_supports_current_temperature(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); traits.set_visual_min_temperature(17); traits.set_visual_max_temperature(30); traits.set_visual_temperature_step(0.5); diff --git a/esphome/components/mitsubishi/mitsubishi.cpp b/esphome/components/mitsubishi/mitsubishi.cpp index 3d9207dd96..10ab4f3b5c 100644 --- a/esphome/components/mitsubishi/mitsubishi.cpp +++ b/esphome/components/mitsubishi/mitsubishi.cpp @@ -52,8 +52,9 @@ const uint8_t MITSUBISHI_BYTE16 = 0x00; climate::ClimateTraits MitsubishiClimate::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(this->sensor_ != nullptr); - traits.set_supports_action(false); + if (this->sensor_ != nullptr) { + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); + } traits.set_visual_min_temperature(MITSUBISHI_TEMP_MIN); traits.set_visual_max_temperature(MITSUBISHI_TEMP_MAX); traits.set_visual_temperature_step(1.0f); diff --git a/esphome/components/pid/pid_climate.cpp b/esphome/components/pid/pid_climate.cpp index 8b3be36dcc..fd74eabd87 100644 --- a/esphome/components/pid/pid_climate.cpp +++ b/esphome/components/pid/pid_climate.cpp @@ -54,11 +54,10 @@ void PIDClimate::control(const climate::ClimateCall &call) { } climate::ClimateTraits 
PIDClimate::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(true); - traits.set_supports_two_point_target_temperature(false); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_SUPPORTS_ACTION); if (this->humidity_sensor_ != nullptr) - traits.set_supports_current_humidity(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY); traits.set_supported_modes({climate::CLIMATE_MODE_OFF}); if (supports_cool_()) @@ -68,7 +67,6 @@ climate::ClimateTraits PIDClimate::traits() { if (supports_heat_() && supports_cool_()) traits.add_supported_mode(climate::CLIMATE_MODE_HEAT_COOL); - traits.set_supports_action(true); return traits; } void PIDClimate::dump_config() { diff --git a/esphome/components/tuya/climate/tuya_climate.cpp b/esphome/components/tuya/climate/tuya_climate.cpp index 7827a4e3ab..04fb14acff 100644 --- a/esphome/components/tuya/climate/tuya_climate.cpp +++ b/esphome/components/tuya/climate/tuya_climate.cpp @@ -283,8 +283,11 @@ void TuyaClimate::control_fan_mode_(const climate::ClimateCall &call) { climate::ClimateTraits TuyaClimate::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_action(true); - traits.set_supports_current_temperature(this->current_temperature_id_.has_value()); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION); + if (this->current_temperature_id_.has_value()) { + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); + } + if (supports_heat_) traits.add_supported_mode(climate::CLIMATE_MODE_HEAT); if (supports_cool_) diff --git a/esphome/components/uponor_smatrix/climate/uponor_smatrix_climate.cpp b/esphome/components/uponor_smatrix/climate/uponor_smatrix_climate.cpp index 8af106dfb7..4256b01c4e 100644 --- a/esphome/components/uponor_smatrix/climate/uponor_smatrix_climate.cpp +++ b/esphome/components/uponor_smatrix/climate/uponor_smatrix_climate.cpp @@ -30,10 +30,9 @@ void UponorSmatrixClimate::loop() { climate::ClimateTraits UponorSmatrixClimate::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(true); - traits.set_supports_current_humidity(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY | + climate::CLIMATE_SUPPORTS_ACTION); traits.set_supported_modes({climate::CLIMATE_MODE_HEAT}); - traits.set_supports_action(true); traits.set_supported_presets({climate::CLIMATE_PRESET_ECO}); traits.set_visual_min_temperature(this->min_temperature_); traits.set_visual_max_temperature(this->max_temperature_); diff --git a/esphome/components/yashima/yashima.cpp b/esphome/components/yashima/yashima.cpp index a3cf53ff66..bf91420620 100644 --- a/esphome/components/yashima/yashima.cpp +++ b/esphome/components/yashima/yashima.cpp @@ -81,7 +81,9 @@ const uint32_t YASHIMA_CARRIER_FREQUENCY = 38000; climate::ClimateTraits YashimaClimate::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(this->sensor_ != nullptr); + if (this->sensor_ != nullptr) { + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); + } traits.set_supported_modes({climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT_COOL}); if (supports_cool_) @@ -89,7 +91,6 @@ climate::ClimateTraits YashimaClimate::traits() { if (supports_heat_) traits.add_supported_mode(climate::CLIMATE_MODE_HEAT); - traits.set_supports_two_point_target_temperature(false); 
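
The climate migration in this patch swaps the per-feature boolean setters (set_supports_current_temperature(), set_supports_action(), set_supports_two_point_target_temperature(), ...) for a single bitmask set via add_feature_flags() and queried with has_feature_flags(), as in dump_traits_() above. Below is a minimal sketch of that flag-set behaviour; the bit values and method bodies are assumptions for illustration, not ESPHome's actual ClimateTraits implementation.

#include <cstdint>
#include <cstdio>

// Assumed bit layout for demonstration; the real climate::CLIMATE_SUPPORTS_*
// constants are defined by the climate component.
enum ClimateFeatureFlag : uint32_t {
  CLIMATE_SUPPORTS_CURRENT_TEMPERATURE = 1u << 0,
  CLIMATE_SUPPORTS_CURRENT_HUMIDITY = 1u << 1,
  CLIMATE_SUPPORTS_TARGET_HUMIDITY = 1u << 2,
  CLIMATE_SUPPORTS_ACTION = 1u << 3,
  CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE = 1u << 4,
};

class ClimateTraitsSketch {
 public:
  // OR one or more flags into the set, mirroring traits.add_feature_flags(a | b).
  void add_feature_flags(uint32_t flags) { this->feature_flags_ |= flags; }
  // "Any of the given flags" semantics, which is how the has_feature_flags()
  // checks above appear to be used; this is an assumption of the sketch.
  bool has_feature_flags(uint32_t flags) const { return (this->feature_flags_ & flags) != 0; }

 private:
  uint32_t feature_flags_{0};
};

int main() {
  ClimateTraitsSketch traits;
  traits.add_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | CLIMATE_SUPPORTS_ACTION);
  std::printf("action supported: %d\n", traits.has_feature_flags(CLIMATE_SUPPORTS_ACTION));
  return 0;
}

Declaring several capabilities then becomes a single OR expression instead of one setter call per capability, which is what lets each component's traits() shrink in the hunks of this patch.
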
traits.set_visual_min_temperature(YASHIMA_TEMP_MIN); traits.set_visual_max_temperature(YASHIMA_TEMP_MAX); traits.set_visual_temperature_step(1); From 63f100a8ca8d90e408ef376017327554a768e1b2 Mon Sep 17 00:00:00 2001 From: Keith Burzinski Date: Mon, 20 Oct 2025 03:56:25 -0500 Subject: [PATCH 194/336] [bang_bang] Various clean-up (#11356) --- .../bang_bang/bang_bang_climate.cpp | 58 ++++++++++++------- .../components/bang_bang/bang_bang_climate.h | 29 +++++----- 2 files changed, 54 insertions(+), 33 deletions(-) diff --git a/esphome/components/bang_bang/bang_bang_climate.cpp b/esphome/components/bang_bang/bang_bang_climate.cpp index 5d8c0eb7b7..f26377a38a 100644 --- a/esphome/components/bang_bang/bang_bang_climate.cpp +++ b/esphome/components/bang_bang/bang_bang_climate.cpp @@ -6,6 +6,9 @@ namespace bang_bang { static const char *const TAG = "bang_bang.climate"; +BangBangClimate::BangBangClimate() + : idle_trigger_(new Trigger<>()), cool_trigger_(new Trigger<>()), heat_trigger_(new Trigger<>()) {} + void BangBangClimate::setup() { this->sensor_->add_on_state_callback([this](float state) { this->current_temperature = state; @@ -31,54 +34,63 @@ void BangBangClimate::setup() { restore->to_call(this).perform(); } else { // restore from defaults, change_away handles those for us - if (supports_cool_ && supports_heat_) { + if (this->supports_cool_ && this->supports_heat_) { this->mode = climate::CLIMATE_MODE_HEAT_COOL; - } else if (supports_cool_) { + } else if (this->supports_cool_) { this->mode = climate::CLIMATE_MODE_COOL; - } else if (supports_heat_) { + } else if (this->supports_heat_) { this->mode = climate::CLIMATE_MODE_HEAT; } this->change_away_(false); } } + void BangBangClimate::control(const climate::ClimateCall &call) { - if (call.get_mode().has_value()) + if (call.get_mode().has_value()) { this->mode = *call.get_mode(); - if (call.get_target_temperature_low().has_value()) + } + if (call.get_target_temperature_low().has_value()) { this->target_temperature_low = *call.get_target_temperature_low(); - if (call.get_target_temperature_high().has_value()) + } + if (call.get_target_temperature_high().has_value()) { this->target_temperature_high = *call.get_target_temperature_high(); - if (call.get_preset().has_value()) + } + if (call.get_preset().has_value()) { this->change_away_(*call.get_preset() == climate::CLIMATE_PRESET_AWAY); + } this->compute_state_(); this->publish_state(); } + climate::ClimateTraits BangBangClimate::traits() { auto traits = climate::ClimateTraits(); traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE | climate::CLIMATE_SUPPORTS_ACTION); - - if (this->humidity_sensor_ != nullptr) + if (this->humidity_sensor_ != nullptr) { traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY); - - traits.set_supported_modes({climate::CLIMATE_MODE_OFF}); - if (supports_cool_) + } + traits.set_supported_modes({ + climate::CLIMATE_MODE_OFF, + }); + if (this->supports_cool_) { traits.add_supported_mode(climate::CLIMATE_MODE_COOL); - if (supports_heat_) + } + if (this->supports_heat_) { traits.add_supported_mode(climate::CLIMATE_MODE_HEAT); - if (supports_cool_ && supports_heat_) + } + if (this->supports_cool_ && this->supports_heat_) { traits.add_supported_mode(climate::CLIMATE_MODE_HEAT_COOL); - - if (supports_away_) { + } + if (this->supports_away_) { traits.set_supported_presets({ climate::CLIMATE_PRESET_HOME, climate::CLIMATE_PRESET_AWAY, }); } - return traits; } + void 
BangBangClimate::compute_state_() { if (this->mode == climate::CLIMATE_MODE_OFF) { this->switch_to_action_(climate::CLIMATE_ACTION_OFF); @@ -123,6 +135,7 @@ void BangBangClimate::compute_state_() { this->switch_to_action_(target_action); } + void BangBangClimate::switch_to_action_(climate::ClimateAction action) { if (action == this->action) { // already in target mode @@ -167,6 +180,7 @@ void BangBangClimate::switch_to_action_(climate::ClimateAction action) { this->prev_trigger_ = trig; this->publish_state(); } + void BangBangClimate::change_away_(bool away) { if (!away) { this->target_temperature_low = this->normal_config_.default_temperature_low; @@ -177,22 +191,26 @@ void BangBangClimate::change_away_(bool away) { } this->preset = away ? climate::CLIMATE_PRESET_AWAY : climate::CLIMATE_PRESET_HOME; } + void BangBangClimate::set_normal_config(const BangBangClimateTargetTempConfig &normal_config) { this->normal_config_ = normal_config; } + void BangBangClimate::set_away_config(const BangBangClimateTargetTempConfig &away_config) { this->supports_away_ = true; this->away_config_ = away_config; } -BangBangClimate::BangBangClimate() - : idle_trigger_(new Trigger<>()), cool_trigger_(new Trigger<>()), heat_trigger_(new Trigger<>()) {} + void BangBangClimate::set_sensor(sensor::Sensor *sensor) { this->sensor_ = sensor; } void BangBangClimate::set_humidity_sensor(sensor::Sensor *humidity_sensor) { this->humidity_sensor_ = humidity_sensor; } + Trigger<> *BangBangClimate::get_idle_trigger() const { return this->idle_trigger_; } Trigger<> *BangBangClimate::get_cool_trigger() const { return this->cool_trigger_; } -void BangBangClimate::set_supports_cool(bool supports_cool) { this->supports_cool_ = supports_cool; } Trigger<> *BangBangClimate::get_heat_trigger() const { return this->heat_trigger_; } + +void BangBangClimate::set_supports_cool(bool supports_cool) { this->supports_cool_ = supports_cool; } void BangBangClimate::set_supports_heat(bool supports_heat) { this->supports_heat_ = supports_heat; } + void BangBangClimate::dump_config() { LOG_CLIMATE("", "Bang Bang Climate", this); ESP_LOGCONFIG(TAG, diff --git a/esphome/components/bang_bang/bang_bang_climate.h b/esphome/components/bang_bang/bang_bang_climate.h index 96368af34c..2e7da93a07 100644 --- a/esphome/components/bang_bang/bang_bang_climate.h +++ b/esphome/components/bang_bang/bang_bang_climate.h @@ -25,14 +25,15 @@ class BangBangClimate : public climate::Climate, public Component { void set_sensor(sensor::Sensor *sensor); void set_humidity_sensor(sensor::Sensor *humidity_sensor); - Trigger<> *get_idle_trigger() const; - Trigger<> *get_cool_trigger() const; void set_supports_cool(bool supports_cool); - Trigger<> *get_heat_trigger() const; void set_supports_heat(bool supports_heat); void set_normal_config(const BangBangClimateTargetTempConfig &normal_config); void set_away_config(const BangBangClimateTargetTempConfig &away_config); + Trigger<> *get_idle_trigger() const; + Trigger<> *get_cool_trigger() const; + Trigger<> *get_heat_trigger() const; + protected: /// Override control to change settings of the climate device. void control(const climate::ClimateCall &call) override; @@ -56,16 +57,10 @@ class BangBangClimate : public climate::Climate, public Component { * * In idle mode, the controller is assumed to have both heating and cooling disabled. */ - Trigger<> *idle_trigger_; + Trigger<> *idle_trigger_{nullptr}; /** The trigger to call when the controller should switch to cooling mode. 
*/ - Trigger<> *cool_trigger_; - /** Whether the controller supports cooling. - * - * A false value for this attribute means that the controller has no cooling action - * (for example a thermostat, where only heating and not-heating is possible). - */ - bool supports_cool_{false}; + Trigger<> *cool_trigger_{nullptr}; /** The trigger to call when the controller should switch to heating mode. * * A null value for this attribute means that the controller has no heating action @@ -73,15 +68,23 @@ class BangBangClimate : public climate::Climate, public Component { * (blinds open) is possible. */ Trigger<> *heat_trigger_{nullptr}; - bool supports_heat_{false}; /** A reference to the trigger that was previously active. * * This is so that the previous trigger can be stopped before enabling a new one. */ Trigger<> *prev_trigger_{nullptr}; - BangBangClimateTargetTempConfig normal_config_{}; + /** Whether the controller supports cooling/heating + * + * A false value for this attribute means that the controller has no respective action + * (for example a thermostat, where only heating and not-heating is possible). + */ + bool supports_cool_{false}; + bool supports_heat_{false}; + bool supports_away_{false}; + + BangBangClimateTargetTempConfig normal_config_{}; BangBangClimateTargetTempConfig away_config_{}; }; From 03def1391724a25420b9aeae2dd2c5227ac75d88 Mon Sep 17 00:00:00 2001 From: Peter Zich Date: Mon, 20 Oct 2025 06:13:13 -0700 Subject: [PATCH 195/336] [hdc1080] Make HDC1080_CMD_CONFIGURATION failure a warning (and log it) (#11355) Co-authored-by: J. Nick Koston --- esphome/components/hdc1080/hdc1080.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/esphome/components/hdc1080/hdc1080.cpp b/esphome/components/hdc1080/hdc1080.cpp index 71b7cd7e6e..fa293f6fc5 100644 --- a/esphome/components/hdc1080/hdc1080.cpp +++ b/esphome/components/hdc1080/hdc1080.cpp @@ -16,7 +16,8 @@ void HDC1080Component::setup() { // if configuration fails - there is a problem if (this->write_register(HDC1080_CMD_CONFIGURATION, config, 2) != i2c::ERROR_OK) { - this->mark_failed(); + ESP_LOGW(TAG, "Failed to configure HDC1080"); + this->status_set_warning(); return; } } From ca2fe994a17eb420b9243511b839db21f3699efe Mon Sep 17 00:00:00 2001 From: EasilyBoredEngineer <105184462+EasilyBoredEngineer@users.noreply.github.com> Date: Tue, 21 Oct 2025 00:44:20 +1000 Subject: [PATCH 196/336] [espnow] Add transport platform for packet_transport (#11025) Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com> Co-authored-by: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com> Co-authored-by: J. 
Nick Koston --- CODEOWNERS | 1 + .../espnow/packet_transport/__init__.py | 39 ++++++++ .../packet_transport/espnow_transport.cpp | 97 +++++++++++++++++++ .../packet_transport/espnow_transport.h | 44 +++++++++ tests/components/espnow/common.yaml | 24 +++++ 5 files changed, 205 insertions(+) create mode 100644 esphome/components/espnow/packet_transport/__init__.py create mode 100644 esphome/components/espnow/packet_transport/espnow_transport.cpp create mode 100644 esphome/components/espnow/packet_transport/espnow_transport.h diff --git a/CODEOWNERS b/CODEOWNERS index 09bd15137a..4f860375d9 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -161,6 +161,7 @@ esphome/components/esp32_rmt_led_strip/* @jesserockz esphome/components/esp8266/* @esphome/core esphome/components/esp_ldo/* @clydebarrow esphome/components/espnow/* @jesserockz +esphome/components/espnow/packet_transport/* @EasilyBoredEngineer esphome/components/ethernet_info/* @gtjadsonsantos esphome/components/event/* @nohat esphome/components/exposure_notifications/* @OttoWinter diff --git a/esphome/components/espnow/packet_transport/__init__.py b/esphome/components/espnow/packet_transport/__init__.py new file mode 100644 index 0000000000..e6d66440db --- /dev/null +++ b/esphome/components/espnow/packet_transport/__init__.py @@ -0,0 +1,39 @@ +"""ESP-NOW transport platform for packet_transport component.""" + +import esphome.codegen as cg +from esphome.components.packet_transport import ( + PacketTransport, + new_packet_transport, + transport_schema, +) +import esphome.config_validation as cv +from esphome.core import HexInt +from esphome.cpp_types import PollingComponent + +from .. import ESPNowComponent, espnow_ns + +CODEOWNERS = ["@EasilyBoredEngineer"] +DEPENDENCIES = ["espnow"] + +ESPNowTransport = espnow_ns.class_("ESPNowTransport", PacketTransport, PollingComponent) + +CONF_ESPNOW_ID = "espnow_id" +CONF_PEER_ADDRESS = "peer_address" + +CONFIG_SCHEMA = transport_schema(ESPNowTransport).extend( + { + cv.GenerateID(CONF_ESPNOW_ID): cv.use_id(ESPNowComponent), + cv.Optional(CONF_PEER_ADDRESS, default="FF:FF:FF:FF:FF:FF"): cv.mac_address, + } +) + + +async def to_code(config): + """Set up the ESP-NOW transport component.""" + var, _ = await new_packet_transport(config) + + await cg.register_parented(var, config[CONF_ESPNOW_ID]) + + # Set peer address - convert MAC to parts array like ESP-NOW does + mac = config[CONF_PEER_ADDRESS] + cg.add(var.set_peer_address([HexInt(x) for x in mac.parts])) diff --git a/esphome/components/espnow/packet_transport/espnow_transport.cpp b/esphome/components/espnow/packet_transport/espnow_transport.cpp new file mode 100644 index 0000000000..d30e9447a0 --- /dev/null +++ b/esphome/components/espnow/packet_transport/espnow_transport.cpp @@ -0,0 +1,97 @@ +#include "espnow_transport.h" + +#ifdef USE_ESP32 + +#include "esphome/core/application.h" +#include "esphome/core/log.h" + +namespace esphome { +namespace espnow { + +static const char *const TAG = "espnow.transport"; + +bool ESPNowTransport::should_send() { return this->parent_ != nullptr && !this->parent_->is_failed(); } + +void ESPNowTransport::setup() { + packet_transport::PacketTransport::setup(); + + if (this->parent_ == nullptr) { + ESP_LOGE(TAG, "ESPNow component not set"); + this->mark_failed(); + return; + } + + ESP_LOGI(TAG, "Registering ESP-NOW handlers"); + ESP_LOGI(TAG, "Peer address: %02X:%02X:%02X:%02X:%02X:%02X", this->peer_address_[0], this->peer_address_[1], + this->peer_address_[2], this->peer_address_[3], this->peer_address_[4], 
this->peer_address_[5]); + + // Register received handler + this->parent_->register_received_handler(static_cast(this)); + + // Register broadcasted handler + this->parent_->register_broadcasted_handler(static_cast(this)); +} + +void ESPNowTransport::update() { + packet_transport::PacketTransport::update(); + this->updated_ = true; +} + +void ESPNowTransport::send_packet(const std::vector &buf) const { + if (this->parent_ == nullptr) { + ESP_LOGE(TAG, "ESPNow component not set"); + return; + } + + if (buf.empty()) { + ESP_LOGW(TAG, "Attempted to send empty packet"); + return; + } + + if (buf.size() > ESP_NOW_MAX_DATA_LEN) { + ESP_LOGE(TAG, "Packet too large: %zu bytes (max %d)", buf.size(), ESP_NOW_MAX_DATA_LEN); + return; + } + + // Send to configured peer address + this->parent_->send(this->peer_address_.data(), buf.data(), buf.size(), [](esp_err_t err) { + if (err != ESP_OK) { + ESP_LOGW(TAG, "Send failed: %d", err); + } + }); +} + +bool ESPNowTransport::on_received(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) { + ESP_LOGV(TAG, "Received packet of size %u from %02X:%02X:%02X:%02X:%02X:%02X", size, info.src_addr[0], + info.src_addr[1], info.src_addr[2], info.src_addr[3], info.src_addr[4], info.src_addr[5]); + + if (data == nullptr || size == 0) { + ESP_LOGW(TAG, "Received empty or null packet"); + return false; + } + + this->packet_buffer_.resize(size); + memcpy(this->packet_buffer_.data(), data, size); + this->process_(this->packet_buffer_); + return false; // Allow other handlers to run +} + +bool ESPNowTransport::on_broadcasted(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) { + ESP_LOGV(TAG, "Received broadcast packet of size %u from %02X:%02X:%02X:%02X:%02X:%02X", size, info.src_addr[0], + info.src_addr[1], info.src_addr[2], info.src_addr[3], info.src_addr[4], info.src_addr[5]); + + if (data == nullptr || size == 0) { + ESP_LOGW(TAG, "Received empty or null broadcast packet"); + return false; + } + + this->packet_buffer_.resize(size); + memcpy(this->packet_buffer_.data(), data, size); + this->process_(this->packet_buffer_); + return false; // Allow other handlers to run +} + +} // namespace espnow +} // namespace esphome + +#endif // USE_ESP32 diff --git a/esphome/components/espnow/packet_transport/espnow_transport.h b/esphome/components/espnow/packet_transport/espnow_transport.h new file mode 100644 index 0000000000..3629fad2cd --- /dev/null +++ b/esphome/components/espnow/packet_transport/espnow_transport.h @@ -0,0 +1,44 @@ +#pragma once + +#include "../espnow_component.h" + +#ifdef USE_ESP32 + +#include "esphome/core/component.h" +#include "esphome/components/packet_transport/packet_transport.h" + +#include + +namespace esphome { +namespace espnow { + +class ESPNowTransport : public packet_transport::PacketTransport, + public Parented, + public ESPNowReceivedPacketHandler, + public ESPNowBroadcastedHandler { + public: + void setup() override; + void update() override; + float get_setup_priority() const override { return setup_priority::AFTER_WIFI; } + + void set_peer_address(peer_address_t address) { + memcpy(this->peer_address_.data(), address.data(), ESP_NOW_ETH_ALEN); + } + + // ESPNow handler interface + bool on_received(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) override; + bool on_broadcasted(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) override; + + protected: + void send_packet(const std::vector &buf) const override; + size_t get_max_packet_size() override { return ESP_NOW_MAX_DATA_LEN; } + bool 
should_send() override; + + peer_address_t peer_address_{{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}}; + std::vector packet_buffer_; +}; + +} // namespace espnow +} // namespace esphome + +#endif // USE_ESP32 diff --git a/tests/components/espnow/common.yaml b/tests/components/espnow/common.yaml index abb31c12b8..895ffb9d15 100644 --- a/tests/components/espnow/common.yaml +++ b/tests/components/espnow/common.yaml @@ -1,4 +1,5 @@ espnow: + id: espnow_component auto_add_peer: false channel: 1 peers: @@ -50,3 +51,26 @@ espnow: - format_mac_address_pretty(info.src_addr).c_str() - format_hex_pretty(data, size).c_str() - info.rx_ctrl->rssi + +packet_transport: + - platform: espnow + id: transport1 + espnow_id: espnow_component + peer_address: "FF:FF:FF:FF:FF:FF" + encryption: + key: "0123456789abcdef0123456789abcdef" + sensors: + - temp_sensor + providers: + - name: test_provider + encryption: + key: "0123456789abcdef0123456789abcdef" + +sensor: + - platform: internal_temperature + id: temp_sensor + + - platform: packet_transport + provider: test_provider + remote_id: temp_sensor + id: remote_temp From abb57f08f5ac21085422a9e7a1d34a7720c5b4e9 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 20 Oct 2025 19:08:31 +0200 Subject: [PATCH 197/336] [pipsolar] cleanup / refactoring (#10291) Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com> --- .../pipsolar/binary_sensor/__init__.py | 4 +- .../pipsolar/output/pipsolar_output.cpp | 2 +- esphome/components/pipsolar/pipsolar.cpp | 1157 ++++++++--------- esphome/components/pipsolar/pipsolar.h | 255 ++-- .../pipsolar/switch/pipsolar_switch.cpp | 4 +- 5 files changed, 655 insertions(+), 767 deletions(-) diff --git a/esphome/components/pipsolar/binary_sensor/__init__.py b/esphome/components/pipsolar/binary_sensor/__init__.py index 625c232ed5..5bcf1f75ee 100644 --- a/esphome/components/pipsolar/binary_sensor/__init__.py +++ b/esphome/components/pipsolar/binary_sensor/__init__.py @@ -62,7 +62,7 @@ CONF_WARNING_MPPT_OVERLOAD = "warning_mppt_overload" CONF_WARNING_BATTERY_TOO_LOW_TO_CHARGE = "warning_battery_too_low_to_charge" CONF_FAULT_DC_DC_OVER_CURRENT = "fault_dc_dc_over_current" CONF_FAULT_CODE = "fault_code" -CONF_WARNUNG_LOW_PV_ENERGY = "warnung_low_pv_energy" +CONF_WARNING_LOW_PV_ENERGY = "warning_low_pv_energy" CONF_WARNING_HIGH_AC_INPUT_DURING_BUS_SOFT_START = ( "warning_high_ac_input_during_bus_soft_start" ) @@ -122,7 +122,7 @@ TYPES = [ CONF_WARNING_BATTERY_TOO_LOW_TO_CHARGE, CONF_FAULT_DC_DC_OVER_CURRENT, CONF_FAULT_CODE, - CONF_WARNUNG_LOW_PV_ENERGY, + CONF_WARNING_LOW_PV_ENERGY, CONF_WARNING_HIGH_AC_INPUT_DURING_BUS_SOFT_START, CONF_WARNING_BATTERY_EQUALIZATION, ] diff --git a/esphome/components/pipsolar/output/pipsolar_output.cpp b/esphome/components/pipsolar/output/pipsolar_output.cpp index 00ec73b56a..163fbf4eb2 100644 --- a/esphome/components/pipsolar/output/pipsolar_output.cpp +++ b/esphome/components/pipsolar/output/pipsolar_output.cpp @@ -13,7 +13,7 @@ void PipsolarOutput::write_state(float state) { if (std::find(this->possible_values_.begin(), this->possible_values_.end(), state) != this->possible_values_.end()) { ESP_LOGD(TAG, "Will write: %s out of value %f / %02.0f", tmp, state, state); - this->parent_->switch_command(std::string(tmp)); + this->parent_->queue_command(std::string(tmp)); } else { ESP_LOGD(TAG, "Will not write: %s as it is not in list of allowed values", tmp); } diff --git a/esphome/components/pipsolar/pipsolar.cpp b/esphome/components/pipsolar/pipsolar.cpp index 
5751ad59f5..b92cc3be9f 100644 --- a/esphome/components/pipsolar/pipsolar.cpp +++ b/esphome/components/pipsolar/pipsolar.cpp @@ -65,631 +65,42 @@ void Pipsolar::loop() { } } - if (this->state_ == STATE_POLL_DECODED) { - std::string mode; - switch (this->used_polling_commands_[this->last_polling_command_].identifier) { - case POLLING_QPIRI: - if (this->grid_rating_voltage_) { - this->grid_rating_voltage_->publish_state(value_grid_rating_voltage_); - } - if (this->grid_rating_current_) { - this->grid_rating_current_->publish_state(value_grid_rating_current_); - } - if (this->ac_output_rating_voltage_) { - this->ac_output_rating_voltage_->publish_state(value_ac_output_rating_voltage_); - } - if (this->ac_output_rating_frequency_) { - this->ac_output_rating_frequency_->publish_state(value_ac_output_rating_frequency_); - } - if (this->ac_output_rating_current_) { - this->ac_output_rating_current_->publish_state(value_ac_output_rating_current_); - } - if (this->ac_output_rating_apparent_power_) { - this->ac_output_rating_apparent_power_->publish_state(value_ac_output_rating_apparent_power_); - } - if (this->ac_output_rating_active_power_) { - this->ac_output_rating_active_power_->publish_state(value_ac_output_rating_active_power_); - } - if (this->battery_rating_voltage_) { - this->battery_rating_voltage_->publish_state(value_battery_rating_voltage_); - } - if (this->battery_recharge_voltage_) { - this->battery_recharge_voltage_->publish_state(value_battery_recharge_voltage_); - } - if (this->battery_under_voltage_) { - this->battery_under_voltage_->publish_state(value_battery_under_voltage_); - } - if (this->battery_bulk_voltage_) { - this->battery_bulk_voltage_->publish_state(value_battery_bulk_voltage_); - } - if (this->battery_float_voltage_) { - this->battery_float_voltage_->publish_state(value_battery_float_voltage_); - } - if (this->battery_type_) { - this->battery_type_->publish_state(value_battery_type_); - } - if (this->current_max_ac_charging_current_) { - this->current_max_ac_charging_current_->publish_state(value_current_max_ac_charging_current_); - } - if (this->current_max_charging_current_) { - this->current_max_charging_current_->publish_state(value_current_max_charging_current_); - } - if (this->input_voltage_range_) { - this->input_voltage_range_->publish_state(value_input_voltage_range_); - } - // special for input voltage range switch - if (this->input_voltage_range_switch_) { - this->input_voltage_range_switch_->publish_state(value_input_voltage_range_ == 1); - } - if (this->output_source_priority_) { - this->output_source_priority_->publish_state(value_output_source_priority_); - } - // special for output source priority switches - if (this->output_source_priority_utility_switch_) { - this->output_source_priority_utility_switch_->publish_state(value_output_source_priority_ == 0); - } - if (this->output_source_priority_solar_switch_) { - this->output_source_priority_solar_switch_->publish_state(value_output_source_priority_ == 1); - } - if (this->output_source_priority_battery_switch_) { - this->output_source_priority_battery_switch_->publish_state(value_output_source_priority_ == 2); - } - if (this->output_source_priority_hybrid_switch_) { - this->output_source_priority_hybrid_switch_->publish_state(value_output_source_priority_ == 3); - } - if (this->charger_source_priority_) { - this->charger_source_priority_->publish_state(value_charger_source_priority_); - } - if (this->parallel_max_num_) { - this->parallel_max_num_->publish_state(value_parallel_max_num_); - } - if 
(this->machine_type_) { - this->machine_type_->publish_state(value_machine_type_); - } - if (this->topology_) { - this->topology_->publish_state(value_topology_); - } - if (this->output_mode_) { - this->output_mode_->publish_state(value_output_mode_); - } - if (this->battery_redischarge_voltage_) { - this->battery_redischarge_voltage_->publish_state(value_battery_redischarge_voltage_); - } - if (this->pv_ok_condition_for_parallel_) { - this->pv_ok_condition_for_parallel_->publish_state(value_pv_ok_condition_for_parallel_); - } - // special for pv ok condition switch - if (this->pv_ok_condition_for_parallel_switch_) { - this->pv_ok_condition_for_parallel_switch_->publish_state(value_pv_ok_condition_for_parallel_ == 1); - } - if (this->pv_power_balance_) { - this->pv_power_balance_->publish_state(value_pv_power_balance_ == 1); - } - // special for power balance switch - if (this->pv_power_balance_switch_) { - this->pv_power_balance_switch_->publish_state(value_pv_power_balance_ == 1); - } - this->state_ = STATE_IDLE; - break; - case POLLING_QPIGS: - if (this->grid_voltage_) { - this->grid_voltage_->publish_state(value_grid_voltage_); - } - if (this->grid_frequency_) { - this->grid_frequency_->publish_state(value_grid_frequency_); - } - if (this->ac_output_voltage_) { - this->ac_output_voltage_->publish_state(value_ac_output_voltage_); - } - if (this->ac_output_frequency_) { - this->ac_output_frequency_->publish_state(value_ac_output_frequency_); - } - if (this->ac_output_apparent_power_) { - this->ac_output_apparent_power_->publish_state(value_ac_output_apparent_power_); - } - if (this->ac_output_active_power_) { - this->ac_output_active_power_->publish_state(value_ac_output_active_power_); - } - if (this->output_load_percent_) { - this->output_load_percent_->publish_state(value_output_load_percent_); - } - if (this->bus_voltage_) { - this->bus_voltage_->publish_state(value_bus_voltage_); - } - if (this->battery_voltage_) { - this->battery_voltage_->publish_state(value_battery_voltage_); - } - if (this->battery_charging_current_) { - this->battery_charging_current_->publish_state(value_battery_charging_current_); - } - if (this->battery_capacity_percent_) { - this->battery_capacity_percent_->publish_state(value_battery_capacity_percent_); - } - if (this->inverter_heat_sink_temperature_) { - this->inverter_heat_sink_temperature_->publish_state(value_inverter_heat_sink_temperature_); - } - if (this->pv_input_current_for_battery_) { - this->pv_input_current_for_battery_->publish_state(value_pv_input_current_for_battery_); - } - if (this->pv_input_voltage_) { - this->pv_input_voltage_->publish_state(value_pv_input_voltage_); - } - if (this->battery_voltage_scc_) { - this->battery_voltage_scc_->publish_state(value_battery_voltage_scc_); - } - if (this->battery_discharge_current_) { - this->battery_discharge_current_->publish_state(value_battery_discharge_current_); - } - if (this->add_sbu_priority_version_) { - this->add_sbu_priority_version_->publish_state(value_add_sbu_priority_version_); - } - if (this->configuration_status_) { - this->configuration_status_->publish_state(value_configuration_status_); - } - if (this->scc_firmware_version_) { - this->scc_firmware_version_->publish_state(value_scc_firmware_version_); - } - if (this->load_status_) { - this->load_status_->publish_state(value_load_status_); - } - if (this->battery_voltage_to_steady_while_charging_) { - this->battery_voltage_to_steady_while_charging_->publish_state( - value_battery_voltage_to_steady_while_charging_); - } - if 
(this->charging_status_) { - this->charging_status_->publish_state(value_charging_status_); - } - if (this->scc_charging_status_) { - this->scc_charging_status_->publish_state(value_scc_charging_status_); - } - if (this->ac_charging_status_) { - this->ac_charging_status_->publish_state(value_ac_charging_status_); - } - if (this->battery_voltage_offset_for_fans_on_) { - this->battery_voltage_offset_for_fans_on_->publish_state(value_battery_voltage_offset_for_fans_on_ / 10.0f); - } //.1 scale - if (this->eeprom_version_) { - this->eeprom_version_->publish_state(value_eeprom_version_); - } - if (this->pv_charging_power_) { - this->pv_charging_power_->publish_state(value_pv_charging_power_); - } - if (this->charging_to_floating_mode_) { - this->charging_to_floating_mode_->publish_state(value_charging_to_floating_mode_); - } - if (this->switch_on_) { - this->switch_on_->publish_state(value_switch_on_); - } - if (this->dustproof_installed_) { - this->dustproof_installed_->publish_state(value_dustproof_installed_); - } - this->state_ = STATE_IDLE; - break; - case POLLING_QMOD: - if (this->device_mode_) { - mode = value_device_mode_; - this->device_mode_->publish_state(mode); - } - this->state_ = STATE_IDLE; - break; - case POLLING_QFLAG: - if (this->silence_buzzer_open_buzzer_) { - this->silence_buzzer_open_buzzer_->publish_state(value_silence_buzzer_open_buzzer_); - } - if (this->overload_bypass_function_) { - this->overload_bypass_function_->publish_state(value_overload_bypass_function_); - } - if (this->lcd_escape_to_default_) { - this->lcd_escape_to_default_->publish_state(value_lcd_escape_to_default_); - } - if (this->overload_restart_function_) { - this->overload_restart_function_->publish_state(value_overload_restart_function_); - } - if (this->over_temperature_restart_function_) { - this->over_temperature_restart_function_->publish_state(value_over_temperature_restart_function_); - } - if (this->backlight_on_) { - this->backlight_on_->publish_state(value_backlight_on_); - } - if (this->alarm_on_when_primary_source_interrupt_) { - this->alarm_on_when_primary_source_interrupt_->publish_state(value_alarm_on_when_primary_source_interrupt_); - } - if (this->fault_code_record_) { - this->fault_code_record_->publish_state(value_fault_code_record_); - } - if (this->power_saving_) { - this->power_saving_->publish_state(value_power_saving_); - } - this->state_ = STATE_IDLE; - break; - case POLLING_QPIWS: - if (this->warnings_present_) { - this->warnings_present_->publish_state(value_warnings_present_); - } - if (this->faults_present_) { - this->faults_present_->publish_state(value_faults_present_); - } - if (this->warning_power_loss_) { - this->warning_power_loss_->publish_state(value_warning_power_loss_); - } - if (this->fault_inverter_fault_) { - this->fault_inverter_fault_->publish_state(value_fault_inverter_fault_); - } - if (this->fault_bus_over_) { - this->fault_bus_over_->publish_state(value_fault_bus_over_); - } - if (this->fault_bus_under_) { - this->fault_bus_under_->publish_state(value_fault_bus_under_); - } - if (this->fault_bus_soft_fail_) { - this->fault_bus_soft_fail_->publish_state(value_fault_bus_soft_fail_); - } - if (this->warning_line_fail_) { - this->warning_line_fail_->publish_state(value_warning_line_fail_); - } - if (this->fault_opvshort_) { - this->fault_opvshort_->publish_state(value_fault_opvshort_); - } - if (this->fault_inverter_voltage_too_low_) { - this->fault_inverter_voltage_too_low_->publish_state(value_fault_inverter_voltage_too_low_); - } - if 
(this->fault_inverter_voltage_too_high_) { - this->fault_inverter_voltage_too_high_->publish_state(value_fault_inverter_voltage_too_high_); - } - if (this->warning_over_temperature_) { - this->warning_over_temperature_->publish_state(value_warning_over_temperature_); - } - if (this->warning_fan_lock_) { - this->warning_fan_lock_->publish_state(value_warning_fan_lock_); - } - if (this->warning_battery_voltage_high_) { - this->warning_battery_voltage_high_->publish_state(value_warning_battery_voltage_high_); - } - if (this->warning_battery_low_alarm_) { - this->warning_battery_low_alarm_->publish_state(value_warning_battery_low_alarm_); - } - if (this->warning_battery_under_shutdown_) { - this->warning_battery_under_shutdown_->publish_state(value_warning_battery_under_shutdown_); - } - if (this->warning_battery_derating_) { - this->warning_battery_derating_->publish_state(value_warning_battery_derating_); - } - if (this->warning_over_load_) { - this->warning_over_load_->publish_state(value_warning_over_load_); - } - if (this->warning_eeprom_failed_) { - this->warning_eeprom_failed_->publish_state(value_warning_eeprom_failed_); - } - if (this->fault_inverter_over_current_) { - this->fault_inverter_over_current_->publish_state(value_fault_inverter_over_current_); - } - if (this->fault_inverter_soft_failed_) { - this->fault_inverter_soft_failed_->publish_state(value_fault_inverter_soft_failed_); - } - if (this->fault_self_test_failed_) { - this->fault_self_test_failed_->publish_state(value_fault_self_test_failed_); - } - if (this->fault_op_dc_voltage_over_) { - this->fault_op_dc_voltage_over_->publish_state(value_fault_op_dc_voltage_over_); - } - if (this->fault_battery_open_) { - this->fault_battery_open_->publish_state(value_fault_battery_open_); - } - if (this->fault_current_sensor_failed_) { - this->fault_current_sensor_failed_->publish_state(value_fault_current_sensor_failed_); - } - if (this->fault_battery_short_) { - this->fault_battery_short_->publish_state(value_fault_battery_short_); - } - if (this->warning_power_limit_) { - this->warning_power_limit_->publish_state(value_warning_power_limit_); - } - if (this->warning_pv_voltage_high_) { - this->warning_pv_voltage_high_->publish_state(value_warning_pv_voltage_high_); - } - if (this->fault_mppt_overload_) { - this->fault_mppt_overload_->publish_state(value_fault_mppt_overload_); - } - if (this->warning_mppt_overload_) { - this->warning_mppt_overload_->publish_state(value_warning_mppt_overload_); - } - if (this->warning_battery_too_low_to_charge_) { - this->warning_battery_too_low_to_charge_->publish_state(value_warning_battery_too_low_to_charge_); - } - if (this->fault_dc_dc_over_current_) { - this->fault_dc_dc_over_current_->publish_state(value_fault_dc_dc_over_current_); - } - if (this->fault_code_) { - this->fault_code_->publish_state(value_fault_code_); - } - if (this->warnung_low_pv_energy_) { - this->warnung_low_pv_energy_->publish_state(value_warnung_low_pv_energy_); - } - if (this->warning_high_ac_input_during_bus_soft_start_) { - this->warning_high_ac_input_during_bus_soft_start_->publish_state( - value_warning_high_ac_input_during_bus_soft_start_); - } - if (this->warning_battery_equalization_) { - this->warning_battery_equalization_->publish_state(value_warning_battery_equalization_); - } - this->state_ = STATE_IDLE; - break; - case POLLING_QT: - case POLLING_QMN: - this->state_ = STATE_IDLE; - break; - } - } - if (this->state_ == STATE_POLL_CHECKED) { - bool enabled = true; - std::string fc; - char 
tmp[PIPSOLAR_READ_BUFFER_LENGTH]; - sprintf(tmp, "%s", this->read_buffer_); - switch (this->used_polling_commands_[this->last_polling_command_].identifier) { + switch (this->enabled_polling_commands_[this->last_polling_command_].identifier) { case POLLING_QPIRI: ESP_LOGD(TAG, "Decode QPIRI"); - sscanf(tmp, "(%f %f %f %f %f %d %d %f %f %f %f %f %d %d %d %d %d %d %d %d %d %d %f %d %d", // NOLINT - &value_grid_rating_voltage_, &value_grid_rating_current_, &value_ac_output_rating_voltage_, // NOLINT - &value_ac_output_rating_frequency_, &value_ac_output_rating_current_, // NOLINT - &value_ac_output_rating_apparent_power_, &value_ac_output_rating_active_power_, // NOLINT - &value_battery_rating_voltage_, &value_battery_recharge_voltage_, // NOLINT - &value_battery_under_voltage_, &value_battery_bulk_voltage_, &value_battery_float_voltage_, // NOLINT - &value_battery_type_, &value_current_max_ac_charging_current_, // NOLINT - &value_current_max_charging_current_, &value_input_voltage_range_, // NOLINT - &value_output_source_priority_, &value_charger_source_priority_, &value_parallel_max_num_, // NOLINT - &value_machine_type_, &value_topology_, &value_output_mode_, // NOLINT - &value_battery_redischarge_voltage_, &value_pv_ok_condition_for_parallel_, // NOLINT - &value_pv_power_balance_); // NOLINT - if (this->last_qpiri_) { - this->last_qpiri_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qpiri_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; case POLLING_QPIGS: ESP_LOGD(TAG, "Decode QPIGS"); - sscanf( // NOLINT - tmp, // NOLINT - "(%f %f %f %f %d %d %d %d %f %d %d %d %f %f %f %d %1d%1d%1d%1d%1d%1d%1d%1d %d %d %d %1d%1d%1d", // NOLINT - &value_grid_voltage_, &value_grid_frequency_, &value_ac_output_voltage_, // NOLINT - &value_ac_output_frequency_, // NOLINT - &value_ac_output_apparent_power_, &value_ac_output_active_power_, &value_output_load_percent_, // NOLINT - &value_bus_voltage_, &value_battery_voltage_, &value_battery_charging_current_, // NOLINT - &value_battery_capacity_percent_, &value_inverter_heat_sink_temperature_, // NOLINT - &value_pv_input_current_for_battery_, &value_pv_input_voltage_, &value_battery_voltage_scc_, // NOLINT - &value_battery_discharge_current_, &value_add_sbu_priority_version_, // NOLINT - &value_configuration_status_, &value_scc_firmware_version_, &value_load_status_, // NOLINT - &value_battery_voltage_to_steady_while_charging_, &value_charging_status_, // NOLINT - &value_scc_charging_status_, &value_ac_charging_status_, // NOLINT - &value_battery_voltage_offset_for_fans_on_, &value_eeprom_version_, &value_pv_charging_power_, // NOLINT - &value_charging_to_floating_mode_, &value_switch_on_, // NOLINT - &value_dustproof_installed_); // NOLINT - if (this->last_qpigs_) { - this->last_qpigs_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qpigs_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; case POLLING_QMOD: ESP_LOGD(TAG, "Decode QMOD"); - this->value_device_mode_ = char(this->read_buffer_[1]); - if (this->last_qmod_) { - this->last_qmod_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qmod_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; case POLLING_QFLAG: ESP_LOGD(TAG, "Decode QFLAG"); - // result like:"(EbkuvxzDajy" - // get through all char: ignore first "(" Enable flag on 'E', Disable on 'D') else set the corresponding value - for (size_t i = 1; i < strlen(tmp); i++) { - switch (tmp[i]) { - case 'E': - enabled = 
true; - break; - case 'D': - enabled = false; - break; - case 'a': - this->value_silence_buzzer_open_buzzer_ = enabled; - break; - case 'b': - this->value_overload_bypass_function_ = enabled; - break; - case 'k': - this->value_lcd_escape_to_default_ = enabled; - break; - case 'u': - this->value_overload_restart_function_ = enabled; - break; - case 'v': - this->value_over_temperature_restart_function_ = enabled; - break; - case 'x': - this->value_backlight_on_ = enabled; - break; - case 'y': - this->value_alarm_on_when_primary_source_interrupt_ = enabled; - break; - case 'z': - this->value_fault_code_record_ = enabled; - break; - case 'j': - this->value_power_saving_ = enabled; - break; - } - } - if (this->last_qflag_) { - this->last_qflag_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qflag_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; case POLLING_QPIWS: ESP_LOGD(TAG, "Decode QPIWS"); - // '(00000000000000000000000000000000' - // iterate over all available flag (as not all models have all flags, but at least in the same order) - this->value_warnings_present_ = false; - this->value_faults_present_ = false; - - for (size_t i = 1; i < strlen(tmp); i++) { - enabled = tmp[i] == '1'; - switch (i) { - case 1: - this->value_warning_power_loss_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 2: - this->value_fault_inverter_fault_ = enabled; - this->value_faults_present_ += enabled; - break; - case 3: - this->value_fault_bus_over_ = enabled; - this->value_faults_present_ += enabled; - break; - case 4: - this->value_fault_bus_under_ = enabled; - this->value_faults_present_ += enabled; - break; - case 5: - this->value_fault_bus_soft_fail_ = enabled; - this->value_faults_present_ += enabled; - break; - case 6: - this->value_warning_line_fail_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 7: - this->value_fault_opvshort_ = enabled; - this->value_faults_present_ += enabled; - break; - case 8: - this->value_fault_inverter_voltage_too_low_ = enabled; - this->value_faults_present_ += enabled; - break; - case 9: - this->value_fault_inverter_voltage_too_high_ = enabled; - this->value_faults_present_ += enabled; - break; - case 10: - this->value_warning_over_temperature_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 11: - this->value_warning_fan_lock_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 12: - this->value_warning_battery_voltage_high_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 13: - this->value_warning_battery_low_alarm_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 15: - this->value_warning_battery_under_shutdown_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 16: - this->value_warning_battery_derating_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 17: - this->value_warning_over_load_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 18: - this->value_warning_eeprom_failed_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 19: - this->value_fault_inverter_over_current_ = enabled; - this->value_faults_present_ += enabled; - break; - case 20: - this->value_fault_inverter_soft_failed_ = enabled; - this->value_faults_present_ += enabled; - break; - case 21: - this->value_fault_self_test_failed_ = enabled; - this->value_faults_present_ += enabled; - break; - case 22: - this->value_fault_op_dc_voltage_over_ = 
enabled; - this->value_faults_present_ += enabled; - break; - case 23: - this->value_fault_battery_open_ = enabled; - this->value_faults_present_ += enabled; - break; - case 24: - this->value_fault_current_sensor_failed_ = enabled; - this->value_faults_present_ += enabled; - break; - case 25: - this->value_fault_battery_short_ = enabled; - this->value_faults_present_ += enabled; - break; - case 26: - this->value_warning_power_limit_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 27: - this->value_warning_pv_voltage_high_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 28: - this->value_fault_mppt_overload_ = enabled; - this->value_faults_present_ += enabled; - break; - case 29: - this->value_warning_mppt_overload_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 30: - this->value_warning_battery_too_low_to_charge_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 31: - this->value_fault_dc_dc_over_current_ = enabled; - this->value_faults_present_ += enabled; - break; - case 32: - fc = tmp[i]; - fc += tmp[i + 1]; - this->value_fault_code_ = parse_number(fc).value_or(0); - break; - case 34: - this->value_warnung_low_pv_energy_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 35: - this->value_warning_high_ac_input_during_bus_soft_start_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 36: - this->value_warning_battery_equalization_ = enabled; - this->value_warnings_present_ += enabled; - break; - } - } - if (this->last_qpiws_) { - this->last_qpiws_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qpiws_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; case POLLING_QT: ESP_LOGD(TAG, "Decode QT"); - if (this->last_qt_) { - this->last_qt_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qt_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; case POLLING_QMN: ESP_LOGD(TAG, "Decode QMN"); - if (this->last_qmn_) { - this->last_qmn_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qmn_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; default: this->state_ = STATE_IDLE; @@ -706,7 +117,7 @@ void Pipsolar::loop() { return; } // crc ok - this->used_polling_commands_[this->last_polling_command_].needs_update = false; + this->enabled_polling_commands_[this->last_polling_command_].needs_update = false; this->state_ = STATE_POLL_CHECKED; return; } else { @@ -719,9 +130,12 @@ void Pipsolar::loop() { uint8_t byte; this->read_byte(&byte); - if (this->read_pos_ == PIPSOLAR_READ_BUFFER_LENGTH) { + // make sure data and null terminator fit in buffer + if (this->read_pos_ >= PIPSOLAR_READ_BUFFER_LENGTH - 1) { this->read_pos_ = 0; this->empty_uart_buffer_(); + ESP_LOGW(TAG, "response data too long, discarding."); + break; } this->read_buffer_[this->read_pos_] = byte; this->read_pos_++; @@ -755,7 +169,8 @@ void Pipsolar::loop() { if (this->state_ == STATE_POLL) { if (millis() - this->command_start_millis_ > esphome::pipsolar::Pipsolar::COMMAND_TIMEOUT) { // command timeout - ESP_LOGD(TAG, "timeout command to poll: %s", this->used_polling_commands_[this->last_polling_command_].command); + ESP_LOGD(TAG, "timeout command to poll: %s", + this->enabled_polling_commands_[this->last_polling_command_].command); this->state_ = STATE_IDLE; } else { } @@ -786,7 +201,7 @@ uint8_t Pipsolar::check_incoming_crc_() { return 0; } -// send next command used +// send next 
command from queue bool Pipsolar::send_next_command_() { uint16_t crc16; if (!this->command_queue_[this->command_queue_position_].empty()) { @@ -815,14 +230,13 @@ bool Pipsolar::send_next_command_() { bool Pipsolar::send_next_poll_() { uint16_t crc16; - for (uint8_t i = 0; i < POLLING_COMMANDS_MAX; i++) { this->last_polling_command_ = (this->last_polling_command_ + 1) % POLLING_COMMANDS_MAX; - if (this->used_polling_commands_[this->last_polling_command_].length == 0) { + if (this->enabled_polling_commands_[this->last_polling_command_].length == 0) { // not enabled continue; } - if (!this->used_polling_commands_[this->last_polling_command_].needs_update) { + if (!this->enabled_polling_commands_[this->last_polling_command_].needs_update) { // no update requested continue; } @@ -830,79 +244,530 @@ bool Pipsolar::send_next_poll_() { this->command_start_millis_ = millis(); this->empty_uart_buffer_(); this->read_pos_ = 0; - crc16 = this->pipsolar_crc_(this->used_polling_commands_[this->last_polling_command_].command, - this->used_polling_commands_[this->last_polling_command_].length); - this->write_array(this->used_polling_commands_[this->last_polling_command_].command, - this->used_polling_commands_[this->last_polling_command_].length); + crc16 = this->pipsolar_crc_(this->enabled_polling_commands_[this->last_polling_command_].command, + this->enabled_polling_commands_[this->last_polling_command_].length); + this->write_array(this->enabled_polling_commands_[this->last_polling_command_].command, + this->enabled_polling_commands_[this->last_polling_command_].length); // checksum this->write(((uint8_t) ((crc16) >> 8))); // highbyte this->write(((uint8_t) ((crc16) &0xff))); // lowbyte // end Byte this->write(0x0D); ESP_LOGD(TAG, "Sending polling command : %s with length %d", - this->used_polling_commands_[this->last_polling_command_].command, - this->used_polling_commands_[this->last_polling_command_].length); + this->enabled_polling_commands_[this->last_polling_command_].command, + this->enabled_polling_commands_[this->last_polling_command_].length); return true; } return false; } -void Pipsolar::queue_command_(const char *command, uint8_t length) { +void Pipsolar::queue_command(const std::string &command) { uint8_t next_position = command_queue_position_; for (uint8_t i = 0; i < COMMAND_QUEUE_LENGTH; i++) { uint8_t testposition = (next_position + i) % COMMAND_QUEUE_LENGTH; if (command_queue_[testposition].empty()) { command_queue_[testposition] = command; - ESP_LOGD(TAG, "Command queued successfully: %s with length %u at position %d", command, - command_queue_[testposition].length(), testposition); + ESP_LOGD(TAG, "Command queued successfully: %s at position %d", command.c_str(), testposition); return; } } - ESP_LOGD(TAG, "Command queue full dropping command: %s", command); + ESP_LOGD(TAG, "Command queue full dropping command: %s", command.c_str()); } -void Pipsolar::switch_command(const std::string &command) { - ESP_LOGD(TAG, "got command: %s", command.c_str()); - queue_command_(command.c_str(), command.length()); +void Pipsolar::handle_qpiri_(const char *message) { + if (this->last_qpiri_) { + this->last_qpiri_->publish_state(message); + } + + size_t pos = 0; + this->skip_start_(message, &pos); + + this->read_float_sensor_(message, &pos, this->grid_rating_voltage_); + this->read_float_sensor_(message, &pos, this->grid_rating_current_); + this->read_float_sensor_(message, &pos, this->ac_output_rating_voltage_); + this->read_float_sensor_(message, &pos, this->ac_output_rating_frequency_); + 
this->read_float_sensor_(message, &pos, this->ac_output_rating_current_); + + this->read_int_sensor_(message, &pos, this->ac_output_rating_apparent_power_); + this->read_int_sensor_(message, &pos, this->ac_output_rating_active_power_); + + this->read_float_sensor_(message, &pos, this->battery_rating_voltage_); + this->read_float_sensor_(message, &pos, this->battery_recharge_voltage_); + this->read_float_sensor_(message, &pos, this->battery_under_voltage_); + this->read_float_sensor_(message, &pos, this->battery_bulk_voltage_); + this->read_float_sensor_(message, &pos, this->battery_float_voltage_); + + this->read_int_sensor_(message, &pos, this->battery_type_); + this->read_int_sensor_(message, &pos, this->current_max_ac_charging_current_); + this->read_int_sensor_(message, &pos, this->current_max_charging_current_); + + esphome::optional<float> input_voltage_range = parse_number<float>(this->read_field_(message, &pos)); + esphome::optional<float> output_source_priority = parse_number<float>(this->read_field_(message, &pos)); + + this->read_int_sensor_(message, &pos, this->charger_source_priority_); + this->read_int_sensor_(message, &pos, this->parallel_max_num_); + this->read_int_sensor_(message, &pos, this->machine_type_); + this->read_int_sensor_(message, &pos, this->topology_); + this->read_int_sensor_(message, &pos, this->output_mode_); + + this->read_float_sensor_(message, &pos, this->battery_redischarge_voltage_); + + esphome::optional<float> pv_ok_condition_for_parallel = parse_number<float>(this->read_field_(message, &pos)); + esphome::optional<float> pv_power_balance = parse_number<float>(this->read_field_(message, &pos)); + + if (this->input_voltage_range_) { + this->input_voltage_range_->publish_state(input_voltage_range.value_or(NAN)); + } + // special for input voltage range switch + if (this->input_voltage_range_switch_ && input_voltage_range.has_value()) { + this->input_voltage_range_switch_->publish_state(input_voltage_range.value() == 1); + } + + if (this->output_source_priority_) { + this->output_source_priority_->publish_state(output_source_priority.value_or(NAN)); + } + // special for output source priority switches + if (this->output_source_priority_utility_switch_ && output_source_priority.has_value()) { + this->output_source_priority_utility_switch_->publish_state(output_source_priority.value() == 0); + } + if (this->output_source_priority_solar_switch_ && output_source_priority.has_value()) { + this->output_source_priority_solar_switch_->publish_state(output_source_priority.value() == 1); + } + if (this->output_source_priority_battery_switch_ && output_source_priority.has_value()) { + this->output_source_priority_battery_switch_->publish_state(output_source_priority.value() == 2); + } + if (this->output_source_priority_hybrid_switch_ && output_source_priority.has_value()) { + this->output_source_priority_hybrid_switch_->publish_state(output_source_priority.value() == 3); + } + + if (this->pv_ok_condition_for_parallel_) { + this->pv_ok_condition_for_parallel_->publish_state(pv_ok_condition_for_parallel.value_or(NAN)); + } + // special for pv ok condition switch + if (this->pv_ok_condition_for_parallel_switch_ && pv_ok_condition_for_parallel.has_value()) { + this->pv_ok_condition_for_parallel_switch_->publish_state(pv_ok_condition_for_parallel.value() == 1); + } + + if (this->pv_power_balance_) { + this->pv_power_balance_->publish_state(pv_power_balance.value_or(NAN)); + } + // special for power balance switch + if (this->pv_power_balance_switch_ && pv_power_balance.has_value()) { + 
this->pv_power_balance_switch_->publish_state(pv_power_balance.value() == 1); + } } + +void Pipsolar::handle_qpigs_(const char *message) { + if (this->last_qpigs_) { + this->last_qpigs_->publish_state(message); + } + + size_t pos = 0; + this->skip_start_(message, &pos); + + this->read_float_sensor_(message, &pos, this->grid_voltage_); + this->read_float_sensor_(message, &pos, this->grid_frequency_); + this->read_float_sensor_(message, &pos, this->ac_output_voltage_); + this->read_float_sensor_(message, &pos, this->ac_output_frequency_); + + this->read_int_sensor_(message, &pos, this->ac_output_apparent_power_); + this->read_int_sensor_(message, &pos, this->ac_output_active_power_); + this->read_int_sensor_(message, &pos, this->output_load_percent_); + this->read_int_sensor_(message, &pos, this->bus_voltage_); + + this->read_float_sensor_(message, &pos, this->battery_voltage_); + + this->read_int_sensor_(message, &pos, this->battery_charging_current_); + this->read_int_sensor_(message, &pos, this->battery_capacity_percent_); + this->read_int_sensor_(message, &pos, this->inverter_heat_sink_temperature_); + + this->read_float_sensor_(message, &pos, this->pv_input_current_for_battery_); + this->read_float_sensor_(message, &pos, this->pv_input_voltage_); + this->read_float_sensor_(message, &pos, this->battery_voltage_scc_); + + this->read_int_sensor_(message, &pos, this->battery_discharge_current_); + + std::string device_status_1 = this->read_field_(message, &pos); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 0), this->add_sbu_priority_version_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 1), this->configuration_status_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 2), this->scc_firmware_version_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 3), this->load_status_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 4), this->battery_voltage_to_steady_while_charging_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 5), this->charging_status_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 6), this->scc_charging_status_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 7), this->ac_charging_status_); + + esphome::optional<float> battery_voltage_offset_for_fans_on = parse_number<float>(this->read_field_(message, &pos)); + if (this->battery_voltage_offset_for_fans_on_) { + this->battery_voltage_offset_for_fans_on_->publish_state(battery_voltage_offset_for_fans_on.value_or(NAN) / 10.0f); + } + this->read_int_sensor_(message, &pos, this->eeprom_version_); + this->read_int_sensor_(message, &pos, this->pv_charging_power_); + + std::string device_status_2 = this->read_field_(message, &pos); + this->publish_binary_sensor_(this->get_bit_(device_status_2, 0), this->charging_to_floating_mode_); + this->publish_binary_sensor_(this->get_bit_(device_status_2, 1), this->switch_on_); + this->publish_binary_sensor_(this->get_bit_(device_status_2, 2), this->dustproof_installed_); +} + +void Pipsolar::handle_qmod_(const char *message) { + std::string mode; + char device_mode = char(message[1]); + if (this->last_qmod_) { + this->last_qmod_->publish_state(message); + } + if (this->device_mode_) { + mode = device_mode; + this->device_mode_->publish_state(mode); + } +} + +void Pipsolar::handle_qflag_(const char *message) { + // result like:"(EbkuvxzDajy" + // get through all char: ignore first "(" Enable flag on 'E', Disable on 'D') else set the corresponding value + if (this->last_qflag_) { + 
this->last_qflag_->publish_state(message); + } + + QFLAGValues values = QFLAGValues(); + bool enabled = true; + for (size_t i = 1; i < strlen(message); i++) { + switch (message[i]) { + case 'E': + enabled = true; + break; + case 'D': + enabled = false; + break; + case 'a': + values.silence_buzzer_open_buzzer = enabled; + break; + case 'b': + values.overload_bypass_function = enabled; + break; + case 'k': + values.lcd_escape_to_default = enabled; + break; + case 'u': + values.overload_restart_function = enabled; + break; + case 'v': + values.over_temperature_restart_function = enabled; + break; + case 'x': + values.backlight_on = enabled; + break; + case 'y': + values.alarm_on_when_primary_source_interrupt = enabled; + break; + case 'z': + values.fault_code_record = enabled; + break; + case 'j': + values.power_saving = enabled; + break; + } + } + + this->publish_binary_sensor_(values.silence_buzzer_open_buzzer, this->silence_buzzer_open_buzzer_); + this->publish_binary_sensor_(values.overload_bypass_function, this->overload_bypass_function_); + this->publish_binary_sensor_(values.lcd_escape_to_default, this->lcd_escape_to_default_); + this->publish_binary_sensor_(values.overload_restart_function, this->overload_restart_function_); + this->publish_binary_sensor_(values.over_temperature_restart_function, this->over_temperature_restart_function_); + this->publish_binary_sensor_(values.backlight_on, this->backlight_on_); + this->publish_binary_sensor_(values.alarm_on_when_primary_source_interrupt, + this->alarm_on_when_primary_source_interrupt_); + this->publish_binary_sensor_(values.fault_code_record, this->fault_code_record_); + this->publish_binary_sensor_(values.power_saving, this->power_saving_); +} + +void Pipsolar::handle_qpiws_(const char *message) { + // '(00000000000000000000000000000000' + // iterate over all available flag (as not all models have all flags, but at least in the same order) + if (this->last_qpiws_) { + this->last_qpiws_->publish_state(message); + } + + size_t pos = 0; + this->skip_start_(message, &pos); + std::string flags = this->read_field_(message, &pos); + + esphome::optional<bool> enabled; + bool value_warnings_present = false; + bool value_faults_present = false; + + for (size_t i = 0; i < 36; i++) { + if (i == 31 || i == 32) { + // special case for fault code + continue; + } + enabled = this->get_bit_(flags, i); + switch (i) { + case 0: + this->publish_binary_sensor_(enabled, this->warning_power_loss_); + value_warnings_present |= enabled.value_or(false); + break; + case 1: + this->publish_binary_sensor_(enabled, this->fault_inverter_fault_); + value_faults_present |= enabled.value_or(false); + break; + case 2: + this->publish_binary_sensor_(enabled, this->fault_bus_over_); + value_faults_present |= enabled.value_or(false); + break; + case 3: + this->publish_binary_sensor_(enabled, this->fault_bus_under_); + value_faults_present |= enabled.value_or(false); + break; + case 4: + this->publish_binary_sensor_(enabled, this->fault_bus_soft_fail_); + value_faults_present |= enabled.value_or(false); + break; + case 5: + this->publish_binary_sensor_(enabled, this->warning_line_fail_); + value_warnings_present |= enabled.value_or(false); + break; + case 6: + this->publish_binary_sensor_(enabled, this->fault_opvshort_); + value_faults_present |= enabled.value_or(false); + break; + case 7: + this->publish_binary_sensor_(enabled, this->fault_inverter_voltage_too_low_); + value_faults_present |= enabled.value_or(false); + break; + case 8: + this->publish_binary_sensor_(enabled, 
this->fault_inverter_voltage_too_high_); + value_faults_present |= enabled.value_or(false); + break; + case 9: + this->publish_binary_sensor_(enabled, this->warning_over_temperature_); + value_warnings_present |= enabled.value_or(false); + break; + case 10: + this->publish_binary_sensor_(enabled, this->warning_fan_lock_); + value_warnings_present |= enabled.value_or(false); + break; + case 11: + this->publish_binary_sensor_(enabled, this->warning_battery_voltage_high_); + value_warnings_present |= enabled.value_or(false); + break; + case 12: + this->publish_binary_sensor_(enabled, this->warning_battery_low_alarm_); + value_warnings_present |= enabled.value_or(false); + break; + case 14: + this->publish_binary_sensor_(enabled, this->warning_battery_under_shutdown_); + value_warnings_present |= enabled.value_or(false); + break; + case 15: + this->publish_binary_sensor_(enabled, this->warning_battery_derating_); + value_warnings_present |= enabled.value_or(false); + break; + case 16: + this->publish_binary_sensor_(enabled, this->warning_over_load_); + value_warnings_present |= enabled.value_or(false); + break; + case 17: + this->publish_binary_sensor_(enabled, this->warning_eeprom_failed_); + value_warnings_present |= enabled.value_or(false); + break; + case 18: + this->publish_binary_sensor_(enabled, this->fault_inverter_over_current_); + value_faults_present |= enabled.value_or(false); + break; + case 19: + this->publish_binary_sensor_(enabled, this->fault_inverter_soft_failed_); + value_faults_present |= enabled.value_or(false); + break; + case 20: + this->publish_binary_sensor_(enabled, this->fault_self_test_failed_); + value_faults_present |= enabled.value_or(false); + break; + case 21: + this->publish_binary_sensor_(enabled, this->fault_op_dc_voltage_over_); + value_faults_present |= enabled.value_or(false); + break; + case 22: + this->publish_binary_sensor_(enabled, this->fault_battery_open_); + value_faults_present |= enabled.value_or(false); + break; + case 23: + this->publish_binary_sensor_(enabled, this->fault_current_sensor_failed_); + value_faults_present |= enabled.value_or(false); + break; + case 24: + this->publish_binary_sensor_(enabled, this->fault_battery_short_); + value_faults_present |= enabled.value_or(false); + break; + case 25: + this->publish_binary_sensor_(enabled, this->warning_power_limit_); + value_warnings_present |= enabled.value_or(false); + break; + case 26: + this->publish_binary_sensor_(enabled, this->warning_pv_voltage_high_); + value_warnings_present |= enabled.value_or(false); + break; + case 27: + this->publish_binary_sensor_(enabled, this->fault_mppt_overload_); + value_faults_present |= enabled.value_or(false); + break; + case 28: + this->publish_binary_sensor_(enabled, this->warning_mppt_overload_); + value_warnings_present |= enabled.value_or(false); + break; + case 29: + this->publish_binary_sensor_(enabled, this->warning_battery_too_low_to_charge_); + value_warnings_present |= enabled.value_or(false); + break; + case 30: + this->publish_binary_sensor_(enabled, this->fault_dc_dc_over_current_); + value_faults_present |= enabled.value_or(false); + break; + case 33: + this->publish_binary_sensor_(enabled, this->warning_low_pv_energy_); + value_warnings_present |= enabled.value_or(false); + break; + case 34: + this->publish_binary_sensor_(enabled, this->warning_high_ac_input_during_bus_soft_start_); + value_warnings_present |= enabled.value_or(false); + break; + case 35: + this->publish_binary_sensor_(enabled, this->warning_battery_equalization_); + 
value_warnings_present |= enabled.value_or(false); + break; + } + } + + this->publish_binary_sensor_(value_warnings_present, this->warnings_present_); + this->publish_binary_sensor_(value_faults_present, this->faults_present_); + + if (this->fault_code_) { + if (flags.length() < 33) { + this->fault_code_->publish_state(NAN); + } else { + std::string fc(flags, 31, 2); + this->fault_code_->publish_state(parse_number<float>(fc).value_or(NAN)); + } + } +} + +void Pipsolar::handle_qt_(const char *message) { + if (this->last_qt_) { + this->last_qt_->publish_state(message); + } +} + +void Pipsolar::handle_qmn_(const char *message) { + if (this->last_qmn_) { + this->last_qmn_->publish_state(message); + } +} + +void Pipsolar::skip_start_(const char *message, size_t *pos) { + if (message[*pos] == '(') { + (*pos)++; + } +} +void Pipsolar::skip_field_(const char *message, size_t *pos) { + // find delimiter or end of string + while (message[*pos] != '\0' && message[*pos] != ' ') { + (*pos)++; + } + if (message[*pos] != '\0') { + // skip delimiter after this field if there is one + (*pos)++; + } +} +std::string Pipsolar::read_field_(const char *message, size_t *pos) { + size_t begin = *pos; + // find delimiter or end of string + while (message[*pos] != '\0' && message[*pos] != ' ') { + (*pos)++; + } + if (*pos == begin) { + return ""; + } + + std::string field(message, begin, *pos - begin); + + if (message[*pos] != '\0') { + // skip delimiter after this field if there is one + (*pos)++; + } + + return field; +} + +void Pipsolar::read_float_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor) { + if (sensor != nullptr) { + std::string field = this->read_field_(message, pos); + sensor->publish_state(parse_number<float>(field).value_or(NAN)); + } else { + this->skip_field_(message, pos); + } +} +void Pipsolar::read_int_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor) { + if (sensor != nullptr) { + std::string field = this->read_field_(message, pos); + esphome::optional<int> parsed = parse_number<int>(field); + sensor->publish_state(parsed.has_value() ? 
parsed.value() : NAN); + } else { + this->skip_field_(message, pos); + } +} + +void Pipsolar::publish_binary_sensor_(esphome::optional<bool> b, binary_sensor::BinarySensor *sensor) { + if (sensor) { + if (b.has_value()) { + sensor->publish_state(b.value()); + } else { + sensor->invalidate_state(); + } + } +} + +esphome::optional<bool> Pipsolar::get_bit_(std::string bits, uint8_t bit_pos) { + if (bit_pos >= bits.length()) { + return {}; + } + return bits[bit_pos] == '1'; +} + void Pipsolar::dump_config() { ESP_LOGCONFIG(TAG, "Pipsolar:\n" - "used commands:"); - for (auto &used_polling_command : this->used_polling_commands_) { - if (used_polling_command.length != 0) { - ESP_LOGCONFIG(TAG, "%s", used_polling_command.command); + "enabled polling commands:"); + for (auto &enabled_polling_command : this->enabled_polling_commands_) { + if (enabled_polling_command.length != 0) { + ESP_LOGCONFIG(TAG, "%s", enabled_polling_command.command); } } } void Pipsolar::update() { - for (auto &used_polling_command : this->used_polling_commands_) { - if (used_polling_command.length != 0) { - used_polling_command.needs_update = true; + for (auto &enabled_polling_command : this->enabled_polling_commands_) { + if (enabled_polling_command.length != 0) { + enabled_polling_command.needs_update = true; } } } void Pipsolar::add_polling_command_(const char *command, ENUMPollingCommand polling_command) { - for (auto &used_polling_command : this->used_polling_commands_) { - if (used_polling_command.length == strlen(command)) { + for (auto &enabled_polling_command : this->enabled_polling_commands_) { + if (enabled_polling_command.length == strlen(command)) { uint8_t len = strlen(command); - if (memcmp(used_polling_command.command, command, len) == 0) { + if (memcmp(enabled_polling_command.command, command, len) == 0) { return; } } - if (used_polling_command.length == 0) { - size_t length = strlen(command) + 1; - const char *beg = command; - const char *end = command + length; - used_polling_command.command = new uint8_t[length]; // NOLINT(cppcoreguidelines-owning-memory) - size_t i = 0; - for (; beg != end; ++beg, ++i) { - used_polling_command.command[i] = (uint8_t) (*beg); + if (enabled_polling_command.length == 0) { + size_t length = strlen(command); + + enabled_polling_command.command = new uint8_t[length + 1]; // NOLINT(cppcoreguidelines-owning-memory) + for (size_t i = 0; i < length + 1; i++) { + enabled_polling_command.command[i] = (uint8_t) command[i]; } - used_polling_command.errors = 0; - used_polling_command.identifier = polling_command; - used_polling_command.length = length - 1; - used_polling_command.needs_update = true; + enabled_polling_command.errors = 0; + enabled_polling_command.identifier = polling_command; + enabled_polling_command.length = length; + enabled_polling_command.needs_update = true; return; } } diff --git a/esphome/components/pipsolar/pipsolar.h b/esphome/components/pipsolar/pipsolar.h index 77b18badb9..40056bac9d 100644 --- a/esphome/components/pipsolar/pipsolar.h +++ b/esphome/components/pipsolar/pipsolar.h @@ -7,6 +7,7 @@ #include "esphome/components/uart/uart.h" #include "esphome/core/automation.h" #include "esphome/core/component.h" +#include "esphome/core/helpers.h" namespace esphome { namespace pipsolar { @@ -28,10 +29,17 @@ struct PollingCommand { bool needs_update; }; -#define PIPSOLAR_VALUED_ENTITY_(type, name, polling_command, value_type) \ - protected: \ - value_type value_##name##_; \ - PIPSOLAR_ENTITY_(type, name, polling_command) +struct QFLAGValues { + esphome::optional<bool> 
silence_buzzer_open_buzzer; + esphome::optional<bool> overload_bypass_function; + esphome::optional<bool> lcd_escape_to_default; + esphome::optional<bool> overload_restart_function; + esphome::optional<bool> over_temperature_restart_function; + esphome::optional<bool> backlight_on; + esphome::optional<bool> alarm_on_when_primary_source_interrupt; + esphome::optional<bool> fault_code_record; + esphome::optional<bool> power_saving; +}; #define PIPSOLAR_ENTITY_(type, name, polling_command) \ protected: \ @@ -43,126 +51,123 @@ struct PollingCommand { this->add_polling_command_(#polling_command, POLLING_##polling_command); \ } -#define PIPSOLAR_SENSOR(name, polling_command, value_type) \ - PIPSOLAR_VALUED_ENTITY_(sensor::Sensor, name, polling_command, value_type) +#define PIPSOLAR_SENSOR(name, polling_command) PIPSOLAR_ENTITY_(sensor::Sensor, name, polling_command) #define PIPSOLAR_SWITCH(name, polling_command) PIPSOLAR_ENTITY_(switch_::Switch, name, polling_command) -#define PIPSOLAR_BINARY_SENSOR(name, polling_command, value_type) \ - PIPSOLAR_VALUED_ENTITY_(binary_sensor::BinarySensor, name, polling_command, value_type) -#define PIPSOLAR_VALUED_TEXT_SENSOR(name, polling_command, value_type) \ - PIPSOLAR_VALUED_ENTITY_(text_sensor::TextSensor, name, polling_command, value_type) +#define PIPSOLAR_BINARY_SENSOR(name, polling_command) \ + PIPSOLAR_ENTITY_(binary_sensor::BinarySensor, name, polling_command) #define PIPSOLAR_TEXT_SENSOR(name, polling_command) PIPSOLAR_ENTITY_(text_sensor::TextSensor, name, polling_command) class Pipsolar : public uart::UARTDevice, public PollingComponent { // QPIGS values - PIPSOLAR_SENSOR(grid_voltage, QPIGS, float) - PIPSOLAR_SENSOR(grid_frequency, QPIGS, float) - PIPSOLAR_SENSOR(ac_output_voltage, QPIGS, float) - PIPSOLAR_SENSOR(ac_output_frequency, QPIGS, float) - PIPSOLAR_SENSOR(ac_output_apparent_power, QPIGS, int) - PIPSOLAR_SENSOR(ac_output_active_power, QPIGS, int) - PIPSOLAR_SENSOR(output_load_percent, QPIGS, int) - PIPSOLAR_SENSOR(bus_voltage, QPIGS, int) - PIPSOLAR_SENSOR(battery_voltage, QPIGS, float) - PIPSOLAR_SENSOR(battery_charging_current, QPIGS, int) - PIPSOLAR_SENSOR(battery_capacity_percent, QPIGS, int) - PIPSOLAR_SENSOR(inverter_heat_sink_temperature, QPIGS, int) - PIPSOLAR_SENSOR(pv_input_current_for_battery, QPIGS, float) - PIPSOLAR_SENSOR(pv_input_voltage, QPIGS, float) - PIPSOLAR_SENSOR(battery_voltage_scc, QPIGS, float) - PIPSOLAR_SENSOR(battery_discharge_current, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(add_sbu_priority_version, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(configuration_status, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(scc_firmware_version, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(load_status, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(battery_voltage_to_steady_while_charging, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(charging_status, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(scc_charging_status, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(ac_charging_status, QPIGS, int) - PIPSOLAR_SENSOR(battery_voltage_offset_for_fans_on, QPIGS, int) //.1 scale - PIPSOLAR_SENSOR(eeprom_version, QPIGS, int) - PIPSOLAR_SENSOR(pv_charging_power, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(charging_to_floating_mode, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(switch_on, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(dustproof_installed, QPIGS, int) + PIPSOLAR_SENSOR(grid_voltage, QPIGS) + PIPSOLAR_SENSOR(grid_frequency, QPIGS) + PIPSOLAR_SENSOR(ac_output_voltage, QPIGS) + PIPSOLAR_SENSOR(ac_output_frequency, QPIGS) + PIPSOLAR_SENSOR(ac_output_apparent_power, QPIGS) + PIPSOLAR_SENSOR(ac_output_active_power, QPIGS) + PIPSOLAR_SENSOR(output_load_percent, 
QPIGS) + PIPSOLAR_SENSOR(bus_voltage, QPIGS) + PIPSOLAR_SENSOR(battery_voltage, QPIGS) + PIPSOLAR_SENSOR(battery_charging_current, QPIGS) + PIPSOLAR_SENSOR(battery_capacity_percent, QPIGS) + PIPSOLAR_SENSOR(inverter_heat_sink_temperature, QPIGS) + PIPSOLAR_SENSOR(pv_input_current_for_battery, QPIGS) + PIPSOLAR_SENSOR(pv_input_voltage, QPIGS) + PIPSOLAR_SENSOR(battery_voltage_scc, QPIGS) + PIPSOLAR_SENSOR(battery_discharge_current, QPIGS) + PIPSOLAR_BINARY_SENSOR(add_sbu_priority_version, QPIGS) + PIPSOLAR_BINARY_SENSOR(configuration_status, QPIGS) + PIPSOLAR_BINARY_SENSOR(scc_firmware_version, QPIGS) + PIPSOLAR_BINARY_SENSOR(load_status, QPIGS) + PIPSOLAR_BINARY_SENSOR(battery_voltage_to_steady_while_charging, QPIGS) + PIPSOLAR_BINARY_SENSOR(charging_status, QPIGS) + PIPSOLAR_BINARY_SENSOR(scc_charging_status, QPIGS) + PIPSOLAR_BINARY_SENSOR(ac_charging_status, QPIGS) + PIPSOLAR_SENSOR(battery_voltage_offset_for_fans_on, QPIGS) //.1 scale + PIPSOLAR_SENSOR(eeprom_version, QPIGS) + PIPSOLAR_SENSOR(pv_charging_power, QPIGS) + PIPSOLAR_BINARY_SENSOR(charging_to_floating_mode, QPIGS) + PIPSOLAR_BINARY_SENSOR(switch_on, QPIGS) + PIPSOLAR_BINARY_SENSOR(dustproof_installed, QPIGS) // QPIRI values - PIPSOLAR_SENSOR(grid_rating_voltage, QPIRI, float) - PIPSOLAR_SENSOR(grid_rating_current, QPIRI, float) - PIPSOLAR_SENSOR(ac_output_rating_voltage, QPIRI, float) - PIPSOLAR_SENSOR(ac_output_rating_frequency, QPIRI, float) - PIPSOLAR_SENSOR(ac_output_rating_current, QPIRI, float) - PIPSOLAR_SENSOR(ac_output_rating_apparent_power, QPIRI, int) - PIPSOLAR_SENSOR(ac_output_rating_active_power, QPIRI, int) - PIPSOLAR_SENSOR(battery_rating_voltage, QPIRI, float) - PIPSOLAR_SENSOR(battery_recharge_voltage, QPIRI, float) - PIPSOLAR_SENSOR(battery_under_voltage, QPIRI, float) - PIPSOLAR_SENSOR(battery_bulk_voltage, QPIRI, float) - PIPSOLAR_SENSOR(battery_float_voltage, QPIRI, float) - PIPSOLAR_SENSOR(battery_type, QPIRI, int) - PIPSOLAR_SENSOR(current_max_ac_charging_current, QPIRI, int) - PIPSOLAR_SENSOR(current_max_charging_current, QPIRI, int) - PIPSOLAR_SENSOR(input_voltage_range, QPIRI, int) - PIPSOLAR_SENSOR(output_source_priority, QPIRI, int) - PIPSOLAR_SENSOR(charger_source_priority, QPIRI, int) - PIPSOLAR_SENSOR(parallel_max_num, QPIRI, int) - PIPSOLAR_SENSOR(machine_type, QPIRI, int) - PIPSOLAR_SENSOR(topology, QPIRI, int) - PIPSOLAR_SENSOR(output_mode, QPIRI, int) - PIPSOLAR_SENSOR(battery_redischarge_voltage, QPIRI, float) - PIPSOLAR_SENSOR(pv_ok_condition_for_parallel, QPIRI, int) - PIPSOLAR_SENSOR(pv_power_balance, QPIRI, int) + PIPSOLAR_SENSOR(grid_rating_voltage, QPIRI) + PIPSOLAR_SENSOR(grid_rating_current, QPIRI) + PIPSOLAR_SENSOR(ac_output_rating_voltage, QPIRI) + PIPSOLAR_SENSOR(ac_output_rating_frequency, QPIRI) + PIPSOLAR_SENSOR(ac_output_rating_current, QPIRI) + PIPSOLAR_SENSOR(ac_output_rating_apparent_power, QPIRI) + PIPSOLAR_SENSOR(ac_output_rating_active_power, QPIRI) + PIPSOLAR_SENSOR(battery_rating_voltage, QPIRI) + PIPSOLAR_SENSOR(battery_recharge_voltage, QPIRI) + PIPSOLAR_SENSOR(battery_under_voltage, QPIRI) + PIPSOLAR_SENSOR(battery_bulk_voltage, QPIRI) + PIPSOLAR_SENSOR(battery_float_voltage, QPIRI) + PIPSOLAR_SENSOR(battery_type, QPIRI) + PIPSOLAR_SENSOR(current_max_ac_charging_current, QPIRI) + PIPSOLAR_SENSOR(current_max_charging_current, QPIRI) + PIPSOLAR_SENSOR(input_voltage_range, QPIRI) + PIPSOLAR_SENSOR(output_source_priority, QPIRI) + PIPSOLAR_SENSOR(charger_source_priority, QPIRI) + PIPSOLAR_SENSOR(parallel_max_num, QPIRI) + PIPSOLAR_SENSOR(machine_type, QPIRI) + 
PIPSOLAR_SENSOR(topology, QPIRI) + PIPSOLAR_SENSOR(output_mode, QPIRI) + PIPSOLAR_SENSOR(battery_redischarge_voltage, QPIRI) + PIPSOLAR_SENSOR(pv_ok_condition_for_parallel, QPIRI) + PIPSOLAR_SENSOR(pv_power_balance, QPIRI) // QMOD values - PIPSOLAR_VALUED_TEXT_SENSOR(device_mode, QMOD, char) + PIPSOLAR_TEXT_SENSOR(device_mode, QMOD) // QFLAG values - PIPSOLAR_BINARY_SENSOR(silence_buzzer_open_buzzer, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(overload_bypass_function, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(lcd_escape_to_default, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(overload_restart_function, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(over_temperature_restart_function, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(backlight_on, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(alarm_on_when_primary_source_interrupt, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(fault_code_record, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(power_saving, QFLAG, int) + PIPSOLAR_BINARY_SENSOR(silence_buzzer_open_buzzer, QFLAG) + PIPSOLAR_BINARY_SENSOR(overload_bypass_function, QFLAG) + PIPSOLAR_BINARY_SENSOR(lcd_escape_to_default, QFLAG) + PIPSOLAR_BINARY_SENSOR(overload_restart_function, QFLAG) + PIPSOLAR_BINARY_SENSOR(over_temperature_restart_function, QFLAG) + PIPSOLAR_BINARY_SENSOR(backlight_on, QFLAG) + PIPSOLAR_BINARY_SENSOR(alarm_on_when_primary_source_interrupt, QFLAG) + PIPSOLAR_BINARY_SENSOR(fault_code_record, QFLAG) + PIPSOLAR_BINARY_SENSOR(power_saving, QFLAG) // QPIWS values - PIPSOLAR_BINARY_SENSOR(warnings_present, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(faults_present, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_power_loss, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_inverter_fault, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_bus_over, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_bus_under, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_bus_soft_fail, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_line_fail, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_opvshort, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_low, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_high, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_over_temperature, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_fan_lock, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_battery_voltage_high, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_battery_low_alarm, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_battery_under_shutdown, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_battery_derating, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_over_load, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_eeprom_failed, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_inverter_over_current, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_inverter_soft_failed, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_self_test_failed, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_op_dc_voltage_over, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_battery_open, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_current_sensor_failed, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_battery_short, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_power_limit, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_pv_voltage_high, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_mppt_overload, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_mppt_overload, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_battery_too_low_to_charge, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_dc_dc_over_current, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_code, QPIWS, int) - PIPSOLAR_BINARY_SENSOR(warnung_low_pv_energy, QPIWS, bool) - 
PIPSOLAR_BINARY_SENSOR(warning_high_ac_input_during_bus_soft_start, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_battery_equalization, QPIWS, bool) + PIPSOLAR_BINARY_SENSOR(warnings_present, QPIWS) + PIPSOLAR_BINARY_SENSOR(faults_present, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_power_loss, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_inverter_fault, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_bus_over, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_bus_under, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_bus_soft_fail, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_line_fail, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_opvshort, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_low, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_high, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_over_temperature, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_fan_lock, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_battery_voltage_high, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_battery_low_alarm, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_battery_under_shutdown, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_battery_derating, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_over_load, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_eeprom_failed, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_inverter_over_current, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_inverter_soft_failed, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_self_test_failed, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_op_dc_voltage_over, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_battery_open, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_current_sensor_failed, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_battery_short, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_power_limit, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_pv_voltage_high, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_mppt_overload, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_mppt_overload, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_battery_too_low_to_charge, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_dc_dc_over_current, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_code, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_low_pv_energy, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_high_ac_input_during_bus_soft_start, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_battery_equalization, QPIWS) PIPSOLAR_TEXT_SENSOR(last_qpigs, QPIGS) PIPSOLAR_TEXT_SENSOR(last_qpiri, QPIRI) @@ -180,14 +185,14 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent { PIPSOLAR_SWITCH(pv_ok_condition_for_parallel_switch, QPIRI) PIPSOLAR_SWITCH(pv_power_balance_switch, QPIRI) - void switch_command(const std::string &command); + void queue_command(const std::string &command); void setup() override; void loop() override; void dump_config() override; void update() override; protected: - static const size_t PIPSOLAR_READ_BUFFER_LENGTH = 110; // maximum supported answer length + static const size_t PIPSOLAR_READ_BUFFER_LENGTH = 128; // maximum supported answer length static const size_t COMMAND_QUEUE_LENGTH = 10; static const size_t COMMAND_TIMEOUT = 5000; static const size_t POLLING_COMMANDS_MAX = 15; @@ -198,7 +203,26 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent { uint16_t pipsolar_crc_(uint8_t *msg, uint8_t len); bool send_next_command_(); bool send_next_poll_(); - void queue_command_(const char *command, uint8_t length); + + void handle_qpiri_(const char *message); + void handle_qpigs_(const char *message); + void handle_qmod_(const char *message); + void handle_qflag_(const char *message); + void handle_qpiws_(const char *message); + void handle_qt_(const char *message); + void handle_qmn_(const char *message); + + void skip_start_(const 
char *message, size_t *pos); + void skip_field_(const char *message, size_t *pos); + std::string read_field_(const char *message, size_t *pos); + + void read_float_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor); + void read_int_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor); + + void publish_binary_sensor_(esphome::optional b, binary_sensor::BinarySensor *sensor); + + esphome::optional get_bit_(std::string bits, uint8_t bit_pos); + std::string command_queue_[COMMAND_QUEUE_LENGTH]; uint8_t command_queue_position_ = 0; uint8_t read_buffer_[PIPSOLAR_READ_BUFFER_LENGTH]; @@ -213,11 +237,10 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent { STATE_POLL_COMPLETE = 3, STATE_COMMAND_COMPLETE = 4, STATE_POLL_CHECKED = 5, - STATE_POLL_DECODED = 6, }; uint8_t last_polling_command_ = 0; - PollingCommand used_polling_commands_[POLLING_COMMANDS_MAX]; + PollingCommand enabled_polling_commands_[POLLING_COMMANDS_MAX]; }; } // namespace pipsolar diff --git a/esphome/components/pipsolar/switch/pipsolar_switch.cpp b/esphome/components/pipsolar/switch/pipsolar_switch.cpp index be7763226b..649d951618 100644 --- a/esphome/components/pipsolar/switch/pipsolar_switch.cpp +++ b/esphome/components/pipsolar/switch/pipsolar_switch.cpp @@ -11,11 +11,11 @@ void PipsolarSwitch::dump_config() { LOG_SWITCH("", "Pipsolar Switch", this); } void PipsolarSwitch::write_state(bool state) { if (state) { if (!this->on_command_.empty()) { - this->parent_->switch_command(this->on_command_); + this->parent_->queue_command(this->on_command_); } } else { if (!this->off_command_.empty()) { - this->parent_->switch_command(this->off_command_); + this->parent_->queue_command(this->off_command_); } } } From e988905c2f498634a6eabb11718d3dd9a6dbf454 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 08:31:59 -1000 Subject: [PATCH 198/336] [json] Add basic compile tests (#11409) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- tests/components/json/common.yaml | 33 +++++++++++++++++++++ tests/components/json/test.esp32-idf.yaml | 1 + tests/components/json/test.esp8266-ard.yaml | 1 + 3 files changed, 35 insertions(+) create mode 100644 tests/components/json/common.yaml create mode 100644 tests/components/json/test.esp32-idf.yaml create mode 100644 tests/components/json/test.esp8266-ard.yaml diff --git a/tests/components/json/common.yaml b/tests/components/json/common.yaml new file mode 100644 index 0000000000..f4074e1172 --- /dev/null +++ b/tests/components/json/common.yaml @@ -0,0 +1,33 @@ +json: + +interval: + - interval: 60s + then: + - lambda: |- + // Test build_json + std::string json_str = esphome::json::build_json([](JsonObject root) { + root["sensor"] = "temperature"; + root["value"] = 23.5; + root["unit"] = "°C"; + }); + ESP_LOGD("test", "Built JSON: %s", json_str.c_str()); + + // Test parse_json + bool parse_ok = esphome::json::parse_json(json_str, [](JsonObject root) { + if (root.containsKey("sensor") && root.containsKey("value")) { + const char* sensor = root["sensor"]; + float value = root["value"]; + ESP_LOGD("test", "Parsed: sensor=%s, value=%.1f", sensor, value); + } else { + ESP_LOGD("test", "Parsed JSON missing required keys"); + } + }); + ESP_LOGD("test", "Parse result (JSON syntax only): %s", parse_ok ? 
"success" : "failed"); + + // Test JsonBuilder class + esphome::json::JsonBuilder builder; + JsonObject obj = builder.root(); + obj["test"] = "direct_builder"; + obj["count"] = 42; + std::string result = builder.serialize(); + ESP_LOGD("test", "JsonBuilder result: %s", result.c_str()); diff --git a/tests/components/json/test.esp32-idf.yaml b/tests/components/json/test.esp32-idf.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/json/test.esp32-idf.yaml @@ -0,0 +1 @@ +<<: !include common.yaml diff --git a/tests/components/json/test.esp8266-ard.yaml b/tests/components/json/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/json/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml From 46101fd8308d31c2c46c5c161fc320baafa889c0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 09:25:03 -1000 Subject: [PATCH 199/336] Add tests for FilterOutValueFilter and ThrottleWithPriorityFilter (#11408) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- tests/components/sensor/common.yaml | 74 ++++ .../fixtures/sensor_filters_value_list.yaml | 332 ++++++++++++++++++ .../test_sensor_filters_value_list.py | 263 ++++++++++++++ 3 files changed, 669 insertions(+) create mode 100644 tests/integration/fixtures/sensor_filters_value_list.yaml create mode 100644 tests/integration/test_sensor_filters_value_list.py diff --git a/tests/components/sensor/common.yaml b/tests/components/sensor/common.yaml index ace7d0a38a..3f81f3f9ef 100644 --- a/tests/components/sensor/common.yaml +++ b/tests/components/sensor/common.yaml @@ -99,3 +99,77 @@ sensor: window_size: 10 send_every: 10 send_first_at: 1 # Send after first value + + # ValueListFilter-based filters tests + # FilterOutValueFilter - single value + - platform: copy + source_id: source_sensor + name: "Filter Out Single Value" + filters: + - filter_out: 42.0 # Should filter out exactly 42.0 + + # FilterOutValueFilter - multiple values + - platform: copy + source_id: source_sensor + name: "Filter Out Multiple Values" + filters: + - filter_out: [0.0, 42.0, 100.0] # List of values to filter + + # FilterOutValueFilter - with NaN + - platform: copy + source_id: source_sensor + name: "Filter Out NaN" + filters: + - filter_out: nan # Filter out NaN values + + # FilterOutValueFilter - mixed values with NaN + - platform: copy + source_id: source_sensor + name: "Filter Out Mixed with NaN" + filters: + - filter_out: [nan, 0.0, 42.0] + + # ThrottleWithPriorityFilter - single priority value + - platform: copy + source_id: source_sensor + name: "Throttle with Single Priority" + filters: + - throttle_with_priority: + timeout: 1000ms + value: 42.0 # Priority value bypasses throttle + + # ThrottleWithPriorityFilter - multiple priority values + - platform: copy + source_id: source_sensor + name: "Throttle with Multiple Priorities" + filters: + - throttle_with_priority: + timeout: 500ms + value: [0.0, 42.0, 100.0] # Multiple priority values + + # ThrottleWithPriorityFilter - with NaN priority + - platform: copy + source_id: source_sensor + name: "Throttle with NaN Priority" + filters: + - throttle_with_priority: + timeout: 1000ms + value: nan # NaN as priority value + + # Combined filters - FilterOutValueFilter + other filters + - platform: copy + source_id: source_sensor + name: "Filter Out Then Throttle" + filters: + - filter_out: [0.0, 100.0] + - throttle: 500ms + + # Combined filters - ThrottleWithPriorityFilter + other filters + - 
platform: copy + source_id: source_sensor + name: "Throttle Priority Then Scale" + filters: + - throttle_with_priority: + timeout: 1000ms + value: [42.0] + - multiply: 2.0 diff --git a/tests/integration/fixtures/sensor_filters_value_list.yaml b/tests/integration/fixtures/sensor_filters_value_list.yaml new file mode 100644 index 0000000000..2b796a5be1 --- /dev/null +++ b/tests/integration/fixtures/sensor_filters_value_list.yaml @@ -0,0 +1,332 @@ +esphome: + name: test-value-list-filters + +host: +api: + batch_delay: 0ms # Disable batching to receive all state updates +logger: + level: DEBUG + +# Template sensors - one for each test to avoid cross-test interference +sensor: + - platform: template + name: "Source Sensor 1" + id: source_sensor_1 + accuracy_decimals: 1 + + - platform: template + name: "Source Sensor 2" + id: source_sensor_2 + accuracy_decimals: 1 + + - platform: template + name: "Source Sensor 3" + id: source_sensor_3 + accuracy_decimals: 1 + + - platform: template + name: "Source Sensor 4" + id: source_sensor_4 + accuracy_decimals: 1 + + - platform: template + name: "Source Sensor 5" + id: source_sensor_5 + accuracy_decimals: 1 + + - platform: template + name: "Source Sensor 6" + id: source_sensor_6 + accuracy_decimals: 2 + + - platform: template + name: "Source Sensor 7" + id: source_sensor_7 + accuracy_decimals: 1 + + # FilterOutValueFilter - single value + - platform: copy + source_id: source_sensor_1 + name: "Filter Out Single" + id: filter_out_single + filters: + - filter_out: 42.0 + + # FilterOutValueFilter - multiple values + - platform: copy + source_id: source_sensor_2 + name: "Filter Out Multiple" + id: filter_out_multiple + filters: + - filter_out: [0.0, 42.0, 100.0] + + # FilterOutValueFilter - with NaN + - platform: copy + source_id: source_sensor_1 + name: "Filter Out NaN" + id: filter_out_nan + filters: + - filter_out: nan + + # ThrottleWithPriorityFilter - single priority value + - platform: copy + source_id: source_sensor_3 + name: "Throttle Priority Single" + id: throttle_priority_single + filters: + - throttle_with_priority: + timeout: 200ms + value: 42.0 + + # ThrottleWithPriorityFilter - multiple priority values + - platform: copy + source_id: source_sensor_4 + name: "Throttle Priority Multiple" + id: throttle_priority_multiple + filters: + - throttle_with_priority: + timeout: 200ms + value: [0.0, 42.0, 100.0] + + # Edge case: Filter Out NaN explicitly + - platform: copy + source_id: source_sensor_5 + name: "Filter Out NaN Test" + id: filter_out_nan_test + filters: + - filter_out: nan + + # Edge case: Accuracy decimals - 2 decimals + - platform: copy + source_id: source_sensor_6 + name: "Filter Out Accuracy 2" + id: filter_out_accuracy_2 + filters: + - filter_out: 42.0 + + # Edge case: Throttle with NaN priority + - platform: copy + source_id: source_sensor_7 + name: "Throttle Priority NaN" + id: throttle_priority_nan + filters: + - throttle_with_priority: + timeout: 200ms + value: nan + +# Script to test FilterOutValueFilter +script: + - id: test_filter_out_single + then: + # Should pass through: 1.0, 2.0, 3.0 + # Should filter out: 42.0 + - sensor.template.publish: + id: source_sensor_1 + state: 1.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor_1 + state: 42.0 # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_1 + state: 2.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor_1 + state: 42.0 # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_1 + state: 3.0 + + - id: 
test_filter_out_multiple + then: + # Should filter out: 0.0, 42.0, 100.0 + # Should pass through: 1.0, 2.0, 50.0 + - sensor.template.publish: + id: source_sensor_2 + state: 0.0 # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_2 + state: 1.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor_2 + state: 42.0 # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_2 + state: 2.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor_2 + state: 100.0 # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_2 + state: 50.0 + + - id: test_throttle_priority_single + then: + # 42.0 bypasses throttle, other values are throttled + - sensor.template.publish: + id: source_sensor_3 + state: 1.0 # First value - passes + - delay: 50ms + - sensor.template.publish: + id: source_sensor_3 + state: 2.0 # Throttled + - delay: 50ms + - sensor.template.publish: + id: source_sensor_3 + state: 42.0 # Priority - passes immediately + - delay: 50ms + - sensor.template.publish: + id: source_sensor_3 + state: 3.0 # Throttled + - delay: 250ms # Wait for throttle to expire + - sensor.template.publish: + id: source_sensor_3 + state: 4.0 # Passes after timeout + + - id: test_throttle_priority_multiple + then: + # 0.0, 42.0, 100.0 bypass throttle + - sensor.template.publish: + id: source_sensor_4 + state: 1.0 # First value - passes + - delay: 50ms + - sensor.template.publish: + id: source_sensor_4 + state: 2.0 # Throttled + - delay: 50ms + - sensor.template.publish: + id: source_sensor_4 + state: 0.0 # Priority - passes + - delay: 50ms + - sensor.template.publish: + id: source_sensor_4 + state: 3.0 # Throttled + - delay: 50ms + - sensor.template.publish: + id: source_sensor_4 + state: 42.0 # Priority - passes + - delay: 50ms + - sensor.template.publish: + id: source_sensor_4 + state: 4.0 # Throttled + - delay: 50ms + - sensor.template.publish: + id: source_sensor_4 + state: 100.0 # Priority - passes + + - id: test_filter_out_nan + then: + # NaN should be filtered out, regular values pass + - sensor.template.publish: + id: source_sensor_5 + state: 1.0 # Pass + - delay: 20ms + - sensor.template.publish: + id: source_sensor_5 + state: !lambda 'return NAN;' # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_5 + state: 2.0 # Pass + - delay: 20ms + - sensor.template.publish: + id: source_sensor_5 + state: !lambda 'return NAN;' # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_5 + state: 3.0 # Pass + + - id: test_filter_out_accuracy_2 + then: + # With 2 decimal places, 42.00 filtered, 42.01 and 42.15 pass + - sensor.template.publish: + id: source_sensor_6 + state: 42.0 # Filtered (rounds to 42.00) + - delay: 20ms + - sensor.template.publish: + id: source_sensor_6 + state: 42.01 # Pass (rounds to 42.01) + - delay: 20ms + - sensor.template.publish: + id: source_sensor_6 + state: 42.15 # Pass (rounds to 42.15) + - delay: 20ms + - sensor.template.publish: + id: source_sensor_6 + state: 42.0 # Filtered (rounds to 42.00) + + - id: test_throttle_priority_nan + then: + # NaN bypasses throttle, regular values throttled + - sensor.template.publish: + id: source_sensor_7 + state: 1.0 # First value - passes + - delay: 50ms + - sensor.template.publish: + id: source_sensor_7 + state: 2.0 # Throttled + - delay: 50ms + - sensor.template.publish: + id: source_sensor_7 + state: !lambda 'return NAN;' # Priority NaN - passes + - delay: 50ms + - sensor.template.publish: + id: source_sensor_7 + 
state: 3.0 # Throttled + - delay: 50ms + - sensor.template.publish: + id: source_sensor_7 + state: !lambda 'return NAN;' # Priority NaN - passes + +# Buttons to trigger each test +button: + - platform: template + name: "Test Filter Out Single" + id: btn_filter_out_single + on_press: + - script.execute: test_filter_out_single + + - platform: template + name: "Test Filter Out Multiple" + id: btn_filter_out_multiple + on_press: + - script.execute: test_filter_out_multiple + + - platform: template + name: "Test Throttle Priority Single" + id: btn_throttle_priority_single + on_press: + - script.execute: test_throttle_priority_single + + - platform: template + name: "Test Throttle Priority Multiple" + id: btn_throttle_priority_multiple + on_press: + - script.execute: test_throttle_priority_multiple + + - platform: template + name: "Test Filter Out NaN" + id: btn_filter_out_nan + on_press: + - script.execute: test_filter_out_nan + + - platform: template + name: "Test Filter Out Accuracy 2" + id: btn_filter_out_accuracy_2 + on_press: + - script.execute: test_filter_out_accuracy_2 + + - platform: template + name: "Test Throttle Priority NaN" + id: btn_throttle_priority_nan + on_press: + - script.execute: test_throttle_priority_nan diff --git a/tests/integration/test_sensor_filters_value_list.py b/tests/integration/test_sensor_filters_value_list.py new file mode 100644 index 0000000000..87323fc730 --- /dev/null +++ b/tests/integration/test_sensor_filters_value_list.py @@ -0,0 +1,263 @@ +"""Test sensor ValueListFilter functionality (FilterOutValueFilter and ThrottleWithPriorityFilter).""" + +from __future__ import annotations + +import asyncio +import math + +from aioesphomeapi import ButtonInfo, EntityState, SensorState +import pytest + +from .state_utils import InitialStateHelper, build_key_to_entity_mapping +from .types import APIClientConnectedFactory, RunCompiledFunction + + +@pytest.mark.asyncio +async def test_sensor_filters_value_list( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test that ValueListFilter-based filters work correctly.""" + loop = asyncio.get_running_loop() + + # Track state changes for all sensors + sensor_values: dict[str, list[float]] = { + "filter_out_single": [], + "filter_out_multiple": [], + "throttle_priority_single": [], + "throttle_priority_multiple": [], + "filter_out_nan_test": [], + "filter_out_accuracy_2": [], + "throttle_priority_nan": [], + } + + # Futures for each test + filter_out_single_done = loop.create_future() + filter_out_multiple_done = loop.create_future() + throttle_single_done = loop.create_future() + throttle_multiple_done = loop.create_future() + filter_out_nan_done = loop.create_future() + filter_out_accuracy_2_done = loop.create_future() + throttle_nan_done = loop.create_future() + + def on_state(state: EntityState) -> None: + """Track sensor state updates.""" + if not isinstance(state, SensorState) or state.missing_state: + return + + sensor_name = key_to_sensor.get(state.key) + if sensor_name not in sensor_values: + return + + sensor_values[sensor_name].append(state.state) + + # Check completion conditions + if ( + sensor_name == "filter_out_single" + and len(sensor_values[sensor_name]) == 3 + and not filter_out_single_done.done() + ): + filter_out_single_done.set_result(True) + elif ( + sensor_name == "filter_out_multiple" + and len(sensor_values[sensor_name]) == 3 + and not filter_out_multiple_done.done() + ): + filter_out_multiple_done.set_result(True) + elif ( 
+ sensor_name == "throttle_priority_single" + and len(sensor_values[sensor_name]) == 3 + and not throttle_single_done.done() + ): + throttle_single_done.set_result(True) + elif ( + sensor_name == "throttle_priority_multiple" + and len(sensor_values[sensor_name]) == 4 + and not throttle_multiple_done.done() + ): + throttle_multiple_done.set_result(True) + elif ( + sensor_name == "filter_out_nan_test" + and len(sensor_values[sensor_name]) == 3 + and not filter_out_nan_done.done() + ): + filter_out_nan_done.set_result(True) + elif ( + sensor_name == "filter_out_accuracy_2" + and len(sensor_values[sensor_name]) == 2 + and not filter_out_accuracy_2_done.done() + ): + filter_out_accuracy_2_done.set_result(True) + elif ( + sensor_name == "throttle_priority_nan" + and len(sensor_values[sensor_name]) == 3 + and not throttle_nan_done.done() + ): + throttle_nan_done.set_result(True) + + async with ( + run_compiled(yaml_config), + api_client_connected() as client, + ): + # Get entities and build key mapping + entities, _ = await client.list_entities_services() + key_to_sensor = build_key_to_entity_mapping( + entities, + { + "filter_out_single": "Filter Out Single", + "filter_out_multiple": "Filter Out Multiple", + "throttle_priority_single": "Throttle Priority Single", + "throttle_priority_multiple": "Throttle Priority Multiple", + "filter_out_nan_test": "Filter Out NaN Test", + "filter_out_accuracy_2": "Filter Out Accuracy 2", + "throttle_priority_nan": "Throttle Priority NaN", + }, + ) + + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial states + await initial_state_helper.wait_for_initial_states() + + # Find all buttons + button_name_map = { + "Test Filter Out Single": "filter_out_single", + "Test Filter Out Multiple": "filter_out_multiple", + "Test Throttle Priority Single": "throttle_priority_single", + "Test Throttle Priority Multiple": "throttle_priority_multiple", + "Test Filter Out NaN": "filter_out_nan", + "Test Filter Out Accuracy 2": "filter_out_accuracy_2", + "Test Throttle Priority NaN": "throttle_priority_nan", + } + buttons = {} + for entity in entities: + if isinstance(entity, ButtonInfo) and entity.name in button_name_map: + buttons[button_name_map[entity.name]] = entity.key + + assert len(buttons) == 7, f"Expected 7 buttons, found {len(buttons)}" + + # Test 1: FilterOutValueFilter - single value + sensor_values["filter_out_single"].clear() + client.button_command(buttons["filter_out_single"]) + try: + await asyncio.wait_for(filter_out_single_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 1 timed out. Values: {sensor_values['filter_out_single']}" + ) + + expected = [1.0, 2.0, 3.0] + assert sensor_values["filter_out_single"] == pytest.approx(expected), ( + f"Test 1 failed: expected {expected}, got {sensor_values['filter_out_single']}" + ) + + # Test 2: FilterOutValueFilter - multiple values + sensor_values["filter_out_multiple"].clear() + filter_out_multiple_done = loop.create_future() + client.button_command(buttons["filter_out_multiple"]) + try: + await asyncio.wait_for(filter_out_multiple_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 2 timed out. 
Values: {sensor_values['filter_out_multiple']}" + ) + + expected = [1.0, 2.0, 50.0] + assert sensor_values["filter_out_multiple"] == pytest.approx(expected), ( + f"Test 2 failed: expected {expected}, got {sensor_values['filter_out_multiple']}" + ) + + # Test 3: ThrottleWithPriorityFilter - single priority + sensor_values["throttle_priority_single"].clear() + throttle_single_done = loop.create_future() + client.button_command(buttons["throttle_priority_single"]) + try: + await asyncio.wait_for(throttle_single_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 3 timed out. Values: {sensor_values['throttle_priority_single']}" + ) + + expected = [1.0, 42.0, 4.0] + assert sensor_values["throttle_priority_single"] == pytest.approx(expected), ( + f"Test 3 failed: expected {expected}, got {sensor_values['throttle_priority_single']}" + ) + + # Test 4: ThrottleWithPriorityFilter - multiple priorities + sensor_values["throttle_priority_multiple"].clear() + throttle_multiple_done = loop.create_future() + client.button_command(buttons["throttle_priority_multiple"]) + try: + await asyncio.wait_for(throttle_multiple_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 4 timed out. Values: {sensor_values['throttle_priority_multiple']}" + ) + + expected = [1.0, 0.0, 42.0, 100.0] + assert sensor_values["throttle_priority_multiple"] == pytest.approx(expected), ( + f"Test 4 failed: expected {expected}, got {sensor_values['throttle_priority_multiple']}" + ) + + # Test 5: FilterOutValueFilter - NaN handling + sensor_values["filter_out_nan_test"].clear() + filter_out_nan_done = loop.create_future() + client.button_command(buttons["filter_out_nan"]) + try: + await asyncio.wait_for(filter_out_nan_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 5 timed out. Values: {sensor_values['filter_out_nan_test']}" + ) + + expected = [1.0, 2.0, 3.0] + assert sensor_values["filter_out_nan_test"] == pytest.approx(expected), ( + f"Test 5 failed: expected {expected}, got {sensor_values['filter_out_nan_test']}" + ) + + # Test 6: FilterOutValueFilter - Accuracy decimals (2) + sensor_values["filter_out_accuracy_2"].clear() + filter_out_accuracy_2_done = loop.create_future() + client.button_command(buttons["filter_out_accuracy_2"]) + try: + await asyncio.wait_for(filter_out_accuracy_2_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 6 timed out. Values: {sensor_values['filter_out_accuracy_2']}" + ) + + expected = [42.01, 42.15] + assert sensor_values["filter_out_accuracy_2"] == pytest.approx(expected), ( + f"Test 6 failed: expected {expected}, got {sensor_values['filter_out_accuracy_2']}" + ) + + # Test 7: ThrottleWithPriorityFilter - NaN priority + sensor_values["throttle_priority_nan"].clear() + throttle_nan_done = loop.create_future() + client.button_command(buttons["throttle_priority_nan"]) + try: + await asyncio.wait_for(throttle_nan_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 7 timed out. 
Values: {sensor_values['throttle_priority_nan']}" + ) + + # First value (1.0) + two NaN priority values + # NaN values will be compared using math.isnan + assert len(sensor_values["throttle_priority_nan"]) == 3, ( + f"Test 7 failed: expected 3 values, got {len(sensor_values['throttle_priority_nan'])}" + ) + assert sensor_values["throttle_priority_nan"][0] == pytest.approx(1.0), ( + f"Test 7 failed: first value should be 1.0, got {sensor_values['throttle_priority_nan'][0]}" + ) + assert math.isnan(sensor_values["throttle_priority_nan"][1]), ( + f"Test 7 failed: second value should be NaN, got {sensor_values['throttle_priority_nan'][1]}" + ) + assert math.isnan(sensor_values["throttle_priority_nan"][2]), ( + f"Test 7 failed: third value should be NaN, got {sensor_values['throttle_priority_nan'][2]}" + ) From e23d66a8cf9b260e4b06152009587e87885f1904 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 10:38:34 -1000 Subject: [PATCH 200/336] [esp32] Automatic CONFIG_LWIP_MAX_SOCKETS configuration based on component needs (#11378) --- esphome/components/api/__init__.py | 12 +++ esphome/components/esp32/__init__.py | 74 +++++++++++++++++++ .../esp32_camera_web_server/__init__.py | 29 ++++++-- esphome/components/esphome/ota/__init__.py | 14 +++- esphome/components/mdns/__init__.py | 15 ++++ esphome/components/mqtt/__init__.py | 11 +++ esphome/components/socket/__init__.py | 28 +++++++ esphome/components/web_server/__init__.py | 13 ++++ 8 files changed, 187 insertions(+), 9 deletions(-) diff --git a/esphome/components/api/__init__.py b/esphome/components/api/__init__.py index e8dacf51bc..e91e922204 100644 --- a/esphome/components/api/__init__.py +++ b/esphome/components/api/__init__.py @@ -155,6 +155,17 @@ def _validate_api_config(config: ConfigType) -> ConfigType: return config +def _consume_api_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for API component.""" + from esphome.components import socket + + # API needs 1 listening socket + typically 3 concurrent client connections + # (not max_connections, which is the upper limit rarely reached) + sockets_needed = 1 + 3 + socket.consume_sockets(sockets_needed, "api")(config) + return config + + CONFIG_SCHEMA = cv.All( cv.Schema( { @@ -222,6 +233,7 @@ CONFIG_SCHEMA = cv.All( ).extend(cv.COMPONENT_SCHEMA), cv.rename_key(CONF_SERVICES, CONF_ACTIONS), _validate_api_config, + _consume_api_sockets, ) diff --git a/esphome/components/esp32/__init__.py b/esphome/components/esp32/__init__.py index af84692615..99a87e06f9 100644 --- a/esphome/components/esp32/__init__.py +++ b/esphome/components/esp32/__init__.py @@ -1,3 +1,4 @@ +import contextlib from dataclasses import dataclass import itertools import logging @@ -102,6 +103,10 @@ COMPILER_OPTIMIZATIONS = { "SIZE": "CONFIG_COMPILER_OPTIMIZATION_SIZE", } +# Socket limit configuration for ESP-IDF +# ESP-IDF CONFIG_LWIP_MAX_SOCKETS has range 1-253, default 10 +DEFAULT_MAX_SOCKETS = 10 # ESP-IDF default + ARDUINO_ALLOWED_VARIANTS = [ VARIANT_ESP32, VARIANT_ESP32C3, @@ -746,6 +751,72 @@ CONFIG_SCHEMA = cv.All( FINAL_VALIDATE_SCHEMA = cv.Schema(final_validate) +def _configure_lwip_max_sockets(conf: dict) -> None: + """Calculate and set CONFIG_LWIP_MAX_SOCKETS based on component needs. + + Socket component tracks consumer needs via consume_sockets() called during config validation. + This function runs in to_code() after all components have registered their socket needs. + User-provided sdkconfig_options take precedence. 
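+
+    A rough worked example (counts are illustrative; the real values are whatever
+    each component registered via socket.consume_sockets() during validation):
+
+        socket_consumers = {"api": 4, "ota": 1, "mdns": 2, "web_server": 3,
+                            "esp32_camera_web_server": 3}
+        total_sockets = sum(socket_consumers.values())          # 13
+        max_sockets = max(DEFAULT_MAX_SOCKETS, total_sockets)   # max(10, 13) -> 13
+        add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", max_sockets)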
+ """ + from esphome.components.socket import KEY_SOCKET_CONSUMERS + + # Check if user manually specified CONFIG_LWIP_MAX_SOCKETS + user_max_sockets = conf.get(CONF_SDKCONFIG_OPTIONS, {}).get( + "CONFIG_LWIP_MAX_SOCKETS" + ) + + socket_consumers: dict[str, int] = CORE.data.get(KEY_SOCKET_CONSUMERS, {}) + total_sockets = sum(socket_consumers.values()) + + # Early return if no sockets registered and no user override + if total_sockets == 0 and user_max_sockets is None: + return + + components_list = ", ".join( + f"{name}={count}" for name, count in sorted(socket_consumers.items()) + ) + + # User specified their own value - respect it but warn if insufficient + if user_max_sockets is not None: + _LOGGER.info( + "Using user-provided CONFIG_LWIP_MAX_SOCKETS: %s", + user_max_sockets, + ) + + # Warn if user's value is less than what components need + if total_sockets > 0: + user_sockets_int = 0 + with contextlib.suppress(ValueError, TypeError): + user_sockets_int = int(user_max_sockets) + + if user_sockets_int < total_sockets: + _LOGGER.warning( + "CONFIG_LWIP_MAX_SOCKETS is set to %d but your configuration " + "needs %d sockets (registered: %s). You may experience socket " + "exhaustion errors. Consider increasing to at least %d.", + user_sockets_int, + total_sockets, + components_list, + total_sockets, + ) + # User's value already added via sdkconfig_options processing + return + + # Auto-calculate based on component needs + # Use at least the ESP-IDF default (10), or the total needed by components + max_sockets = max(DEFAULT_MAX_SOCKETS, total_sockets) + + log_level = logging.INFO if max_sockets > DEFAULT_MAX_SOCKETS else logging.DEBUG + _LOGGER.log( + log_level, + "Setting CONFIG_LWIP_MAX_SOCKETS to %d (registered: %s)", + max_sockets, + components_list, + ) + + add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", max_sockets) + + async def to_code(config): cg.add_platformio_option("board", config[CONF_BOARD]) cg.add_platformio_option("board_upload.flash_size", config[CONF_FLASH_SIZE]) @@ -866,6 +937,9 @@ async def to_code(config): add_idf_sdkconfig_option("CONFIG_LWIP_DNS_SUPPORT_MDNS_QUERIES", False) if not advanced.get(CONF_ENABLE_LWIP_BRIDGE_INTERFACE, False): add_idf_sdkconfig_option("CONFIG_LWIP_BRIDGEIF_MAX_PORTS", 0) + + _configure_lwip_max_sockets(conf) + if advanced.get(CONF_EXECUTE_FROM_PSRAM, False): add_idf_sdkconfig_option("CONFIG_SPIRAM_FETCH_INSTRUCTIONS", True) add_idf_sdkconfig_option("CONFIG_SPIRAM_RODATA", True) diff --git a/esphome/components/esp32_camera_web_server/__init__.py b/esphome/components/esp32_camera_web_server/__init__.py index a6a7ac3630..ed1aaa2e07 100644 --- a/esphome/components/esp32_camera_web_server/__init__.py +++ b/esphome/components/esp32_camera_web_server/__init__.py @@ -1,6 +1,7 @@ import esphome.codegen as cg import esphome.config_validation as cv from esphome.const import CONF_ID, CONF_MODE, CONF_PORT +from esphome.types import ConfigType CODEOWNERS = ["@ayufan"] AUTO_LOAD = ["camera"] @@ -13,13 +14,27 @@ Mode = esp32_camera_web_server_ns.enum("Mode") MODES = {"STREAM": Mode.STREAM, "SNAPSHOT": Mode.SNAPSHOT} -CONFIG_SCHEMA = cv.Schema( - { - cv.GenerateID(): cv.declare_id(CameraWebServer), - cv.Required(CONF_PORT): cv.port, - cv.Required(CONF_MODE): cv.enum(MODES, upper=True), - }, -).extend(cv.COMPONENT_SCHEMA) + +def _consume_camera_web_server_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for camera web server.""" + from esphome.components import socket + + # Each camera web server instance needs 1 listening socket + 2 
client connections + sockets_needed = 3 + socket.consume_sockets(sockets_needed, "esp32_camera_web_server")(config) + return config + + +CONFIG_SCHEMA = cv.All( + cv.Schema( + { + cv.GenerateID(): cv.declare_id(CameraWebServer), + cv.Required(CONF_PORT): cv.port, + cv.Required(CONF_MODE): cv.enum(MODES, upper=True), + }, + ).extend(cv.COMPONENT_SCHEMA), + _consume_camera_web_server_sockets, +) async def to_code(config): diff --git a/esphome/components/esphome/ota/__init__.py b/esphome/components/esphome/ota/__init__.py index 69a50a2de9..e56e85b231 100644 --- a/esphome/components/esphome/ota/__init__.py +++ b/esphome/components/esphome/ota/__init__.py @@ -103,7 +103,16 @@ def ota_esphome_final_validate(config): ) -CONFIG_SCHEMA = ( +def _consume_ota_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for OTA component.""" + from esphome.components import socket + + # OTA needs 1 listening socket (client connections are temporary during updates) + socket.consume_sockets(1, "ota")(config) + return config + + +CONFIG_SCHEMA = cv.All( cv.Schema( { cv.GenerateID(): cv.declare_id(ESPHomeOTAComponent), @@ -130,7 +139,8 @@ CONFIG_SCHEMA = ( } ) .extend(BASE_OTA_SCHEMA) - .extend(cv.COMPONENT_SCHEMA) + .extend(cv.COMPONENT_SCHEMA), + _consume_ota_sockets, ) FINAL_VALIDATE_SCHEMA = ota_esphome_final_validate diff --git a/esphome/components/mdns/__init__.py b/esphome/components/mdns/__init__.py index c6a9ee1a0c..4776bef22f 100644 --- a/esphome/components/mdns/__init__.py +++ b/esphome/components/mdns/__init__.py @@ -13,6 +13,7 @@ from esphome.const import ( ) from esphome.core import CORE, Lambda, coroutine_with_priority from esphome.coroutine import CoroPriority +from esphome.types import ConfigType CODEOWNERS = ["@esphome/core"] DEPENDENCIES = ["network"] @@ -46,6 +47,19 @@ SERVICE_SCHEMA = cv.Schema( } ) + +def _consume_mdns_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for mDNS component.""" + if config.get(CONF_DISABLED): + return config + + from esphome.components import socket + + # mDNS needs 2 sockets (IPv4 + IPv6 multicast) + socket.consume_sockets(2, "mdns")(config) + return config + + CONFIG_SCHEMA = cv.All( cv.Schema( { @@ -55,6 +69,7 @@ CONFIG_SCHEMA = cv.All( } ), _remove_id_if_disabled, + _consume_mdns_sockets, ) diff --git a/esphome/components/mqtt/__init__.py b/esphome/components/mqtt/__init__.py index 814fb566d4..641c70a367 100644 --- a/esphome/components/mqtt/__init__.py +++ b/esphome/components/mqtt/__init__.py @@ -58,6 +58,7 @@ from esphome.const import ( PlatformFramework, ) from esphome.core import CORE, CoroPriority, coroutine_with_priority +from esphome.types import ConfigType DEPENDENCIES = ["network"] @@ -210,6 +211,15 @@ def validate_fingerprint(value): return value +def _consume_mqtt_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for MQTT component.""" + from esphome.components import socket + + # MQTT needs 1 socket for the broker connection + socket.consume_sockets(1, "mqtt")(config) + return config + + CONFIG_SCHEMA = cv.All( cv.Schema( { @@ -306,6 +316,7 @@ CONFIG_SCHEMA = cv.All( ), validate_config, cv.only_on([PLATFORM_ESP32, PLATFORM_ESP8266, PLATFORM_BK72XX]), + _consume_mqtt_sockets, ) diff --git a/esphome/components/socket/__init__.py b/esphome/components/socket/__init__.py index e085a09eac..e6a4cfc07f 100644 --- a/esphome/components/socket/__init__.py +++ b/esphome/components/socket/__init__.py @@ -1,3 +1,5 @@ +from collections.abc import Callable, MutableMapping + import esphome.codegen as cg 
import esphome.config_validation as cv from esphome.core import CORE @@ -9,6 +11,32 @@ IMPLEMENTATION_LWIP_TCP = "lwip_tcp" IMPLEMENTATION_LWIP_SOCKETS = "lwip_sockets" IMPLEMENTATION_BSD_SOCKETS = "bsd_sockets" +# Socket tracking infrastructure +# Components register their socket needs and platforms read this to configure appropriately +KEY_SOCKET_CONSUMERS = "socket_consumers" + + +def consume_sockets( + value: int, consumer: str +) -> Callable[[MutableMapping], MutableMapping]: + """Register socket usage for a component. + + Args: + value: Number of sockets needed by the component + consumer: Name of the component consuming the sockets + + Returns: + A validator function that records the socket usage + """ + + def _consume_sockets(config: MutableMapping) -> MutableMapping: + consumers: dict[str, int] = CORE.data.setdefault(KEY_SOCKET_CONSUMERS, {}) + consumers[consumer] = consumers.get(consumer, 0) + value + return config + + return _consume_sockets + + CONFIG_SCHEMA = cv.Schema( { cv.SplitDefault( diff --git a/esphome/components/web_server/__init__.py b/esphome/components/web_server/__init__.py index 288d928e80..a7fdf30eef 100644 --- a/esphome/components/web_server/__init__.py +++ b/esphome/components/web_server/__init__.py @@ -136,6 +136,18 @@ def _final_validate_sorting(config: ConfigType) -> ConfigType: FINAL_VALIDATE_SCHEMA = _final_validate_sorting + +def _consume_web_server_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for web_server component.""" + from esphome.components import socket + + # Web server needs 1 listening socket + typically 2 concurrent client connections + # (browser makes 2 connections for page + event stream) + sockets_needed = 3 + socket.consume_sockets(sockets_needed, "web_server")(config) + return config + + sorting_group = { cv.Required(CONF_ID): cv.declare_id(cg.int_), cv.Required(CONF_NAME): cv.string, @@ -205,6 +217,7 @@ CONFIG_SCHEMA = cv.All( validate_local, validate_sorting_groups, validate_ota, + _consume_web_server_sockets, ) From 1706a69fad9e101b9e1afd2c613109bc273b6a41 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 10:38:49 -1000 Subject: [PATCH 201/336] [sensor] Optimize filter memory usage with ValueListFilter base class (#11407) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- esphome/components/sensor/__init__.py | 7 ++- esphome/components/sensor/filter.cpp | 73 +++++++++++++-------------- esphome/components/sensor/filter.h | 28 +++++++--- 3 files changed, 59 insertions(+), 49 deletions(-) diff --git a/esphome/components/sensor/__init__.py b/esphome/components/sensor/__init__.py index d9724a741d..e603896f6d 100644 --- a/esphome/components/sensor/__init__.py +++ b/esphome/components/sensor/__init__.py @@ -261,9 +261,12 @@ ThrottleAverageFilter = sensor_ns.class_("ThrottleAverageFilter", Filter, cg.Com LambdaFilter = sensor_ns.class_("LambdaFilter", Filter) OffsetFilter = sensor_ns.class_("OffsetFilter", Filter) MultiplyFilter = sensor_ns.class_("MultiplyFilter", Filter) -FilterOutValueFilter = sensor_ns.class_("FilterOutValueFilter", Filter) +ValueListFilter = sensor_ns.class_("ValueListFilter", Filter) +FilterOutValueFilter = sensor_ns.class_("FilterOutValueFilter", ValueListFilter) ThrottleFilter = sensor_ns.class_("ThrottleFilter", Filter) -ThrottleWithPriorityFilter = sensor_ns.class_("ThrottleWithPriorityFilter", Filter) +ThrottleWithPriorityFilter = sensor_ns.class_( + "ThrottleWithPriorityFilter", ValueListFilter +) TimeoutFilter = sensor_ns.class_("TimeoutFilter", Filter, cg.Component) DebounceFilter = sensor_ns.class_("DebounceFilter", Filter, cg.Component) HeartbeatFilter = sensor_ns.class_("HeartbeatFilter", Filter, cg.Component) diff --git a/esphome/components/sensor/filter.cpp b/esphome/components/sensor/filter.cpp index 1eb0b84964..0d57c792db 100644 --- a/esphome/components/sensor/filter.cpp +++ b/esphome/components/sensor/filter.cpp @@ -228,27 +228,40 @@ MultiplyFilter::MultiplyFilter(TemplatableValue multiplier) : multiplier_ optional MultiplyFilter::new_value(float value) { return value * this->multiplier_.value(); } -// FilterOutValueFilter -FilterOutValueFilter::FilterOutValueFilter(std::vector> values_to_filter_out) - : values_to_filter_out_(std::move(values_to_filter_out)) {} +// ValueListFilter (base class) +ValueListFilter::ValueListFilter(std::initializer_list> values) : values_(values) {} -optional FilterOutValueFilter::new_value(float value) { +bool ValueListFilter::value_matches_any_(float sensor_value) { int8_t accuracy = this->parent_->get_accuracy_decimals(); float accuracy_mult = powf(10.0f, accuracy); - for (auto filter_value : this->values_to_filter_out_) { - if (std::isnan(filter_value.value())) { - if (std::isnan(value)) { - return {}; - } + float rounded_sensor = roundf(accuracy_mult * sensor_value); + + for (auto &filter_value : this->values_) { + float fv = filter_value.value(); + + // Handle NaN comparison + if (std::isnan(fv)) { + if (std::isnan(sensor_value)) + return true; continue; } - float rounded_filter_out = roundf(accuracy_mult * filter_value.value()); - float rounded_value = roundf(accuracy_mult * value); - if (rounded_filter_out == rounded_value) { - return {}; - } + + // Compare rounded values + if (roundf(accuracy_mult * fv) == rounded_sensor) + return true; } - return value; + + return false; +} + +// FilterOutValueFilter +FilterOutValueFilter::FilterOutValueFilter(std::initializer_list> values_to_filter_out) + : ValueListFilter(values_to_filter_out) {} + +optional FilterOutValueFilter::new_value(float value) { + if (this->value_matches_any_(value)) + return {}; // Filter out + return 
value; // Pass through } // ThrottleFilter @@ -263,33 +276,15 @@ optional ThrottleFilter::new_value(float value) { } // ThrottleWithPriorityFilter -ThrottleWithPriorityFilter::ThrottleWithPriorityFilter(uint32_t min_time_between_inputs, - std::vector> prioritized_values) - : min_time_between_inputs_(min_time_between_inputs), prioritized_values_(std::move(prioritized_values)) {} +ThrottleWithPriorityFilter::ThrottleWithPriorityFilter( + uint32_t min_time_between_inputs, std::initializer_list> prioritized_values) + : ValueListFilter(prioritized_values), min_time_between_inputs_(min_time_between_inputs) {} optional ThrottleWithPriorityFilter::new_value(float value) { - bool is_prioritized_value = false; - int8_t accuracy = this->parent_->get_accuracy_decimals(); - float accuracy_mult = powf(10.0f, accuracy); const uint32_t now = App.get_loop_component_start_time(); - // First, determine if the new value is one of the prioritized values - for (auto prioritized_value : this->prioritized_values_) { - if (std::isnan(prioritized_value.value())) { - if (std::isnan(value)) { - is_prioritized_value = true; - break; - } - continue; - } - float rounded_prioritized_value = roundf(accuracy_mult * prioritized_value.value()); - float rounded_value = roundf(accuracy_mult * value); - if (rounded_prioritized_value == rounded_value) { - is_prioritized_value = true; - break; - } - } - // Finally, determine if the new value should be throttled and pass it through if not - if (this->last_input_ == 0 || now - this->last_input_ >= min_time_between_inputs_ || is_prioritized_value) { + // Allow value through if: no previous input, time expired, or is prioritized + if (this->last_input_ == 0 || now - this->last_input_ >= min_time_between_inputs_ || + this->value_matches_any_(value)) { this->last_input_ = now; return value; } diff --git a/esphome/components/sensor/filter.h b/esphome/components/sensor/filter.h index 57bb06b517..e09c66afcb 100644 --- a/esphome/components/sensor/filter.h +++ b/esphome/components/sensor/filter.h @@ -317,15 +317,28 @@ class MultiplyFilter : public Filter { TemplatableValue multiplier_; }; +/** Base class for filters that compare sensor values against a list of configured values. + * + * This base class provides common functionality for filters that need to check if a sensor + * value matches any value in a configured list, with proper handling of NaN values and + * accuracy-based rounding for comparisons. + */ +class ValueListFilter : public Filter { + protected: + explicit ValueListFilter(std::initializer_list> values); + + /// Check if sensor value matches any configured value (with accuracy rounding) + bool value_matches_any_(float sensor_value); + + FixedVector> values_; +}; + /// A simple filter that only forwards the filter chain if it doesn't receive `value_to_filter_out`. -class FilterOutValueFilter : public Filter { +class FilterOutValueFilter : public ValueListFilter { public: - explicit FilterOutValueFilter(std::vector> values_to_filter_out); + explicit FilterOutValueFilter(std::initializer_list> values_to_filter_out); optional new_value(float value) override; - - protected: - std::vector> values_to_filter_out_; }; class ThrottleFilter : public Filter { @@ -340,17 +353,16 @@ class ThrottleFilter : public Filter { }; /// Same as 'throttle' but will immediately publish values contained in `value_to_prioritize`. 
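 ///
 /// Rough behavioural sketch (values purely illustrative; in generated code the
 /// filter is attached to a sensor, which supplies the accuracy decimals used
 /// when comparing against the prioritized values):
 ///
 ///   ThrottleWithPriorityFilter f(1000, {42.0f});  // throttle to 1000 ms, but 42.0 always passes
 ///   // first value (e.g. 1.0)        -> published (nothing seen yet)
 ///   // 2.0 within the 1000 ms window -> dropped by the throttle
 ///   // 42.0 within the window        -> published immediately (prioritized value)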
-class ThrottleWithPriorityFilter : public Filter { +class ThrottleWithPriorityFilter : public ValueListFilter { public: explicit ThrottleWithPriorityFilter(uint32_t min_time_between_inputs, - std::vector> prioritized_values); + std::initializer_list> prioritized_values); optional new_value(float value) override; protected: uint32_t last_input_{0}; uint32_t min_time_between_inputs_; - std::vector> prioritized_values_; }; class TimeoutFilter : public Filter, public Component { From 0f4b54aa828ed5403e8f88ffd0db9861e42e97f9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 11:07:39 -1000 Subject: [PATCH 202/336] [esp32_improv, improv_base] Reduce flash usage by 352 bytes (#11406) --- .../esp32_improv/esp32_improv_component.cpp | 18 +++++++---- .../components/improv_base/improv_base.cpp | 32 ++++++++++--------- 2 files changed, 29 insertions(+), 21 deletions(-) diff --git a/esphome/components/esp32_improv/esp32_improv_component.cpp b/esphome/components/esp32_improv/esp32_improv_component.cpp index 526f7f4b42..329349b531 100644 --- a/esphome/components/esp32_improv/esp32_improv_component.cpp +++ b/esphome/components/esp32_improv/esp32_improv_component.cpp @@ -384,26 +384,32 @@ void ESP32ImprovComponent::check_wifi_connection_() { this->connecting_sta_ = {}; this->cancel_timeout("wifi-connect-timeout"); - std::vector urls; + // Build URL list with minimal allocations + // Maximum 3 URLs: custom next_url + ESPHOME_MY_LINK + webserver URL + std::string url_strings[3]; + size_t url_count = 0; // Add next_url if configured (should be first per Improv BLE spec) std::string next_url = this->get_formatted_next_url_(); if (!next_url.empty()) { - urls.push_back(next_url); + url_strings[url_count++] = std::move(next_url); } // Add default URLs for backward compatibility - urls.emplace_back(ESPHOME_MY_LINK); + url_strings[url_count++] = ESPHOME_MY_LINK; #ifdef USE_WEBSERVER for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) { if (ip.is_ip4()) { - std::string webserver_url = "http://" + ip.str() + ":" + to_string(USE_WEBSERVER_PORT); - urls.push_back(webserver_url); + char url_buffer[64]; + snprintf(url_buffer, sizeof(url_buffer), "http://%s:%d", ip.str().c_str(), USE_WEBSERVER_PORT); + url_strings[url_count++] = url_buffer; break; } } #endif - std::vector data = improv::build_rpc_response(improv::WIFI_SETTINGS, urls); + // Pass to build_rpc_response using vector constructor from iterators to avoid extra copies + std::vector data = improv::build_rpc_response( + improv::WIFI_SETTINGS, std::vector(url_strings, url_strings + url_count)); this->send_response_(data); } else if (this->is_active() && this->state_ != improv::STATE_PROVISIONED) { ESP_LOGD(TAG, "WiFi provisioned externally"); diff --git a/esphome/components/improv_base/improv_base.cpp b/esphome/components/improv_base/improv_base.cpp index 89ee5492b5..233098e6cd 100644 --- a/esphome/components/improv_base/improv_base.cpp +++ b/esphome/components/improv_base/improv_base.cpp @@ -6,6 +6,21 @@ namespace esphome { namespace improv_base { +static constexpr const char DEVICE_NAME_PLACEHOLDER[] = "{{device_name}}"; +static constexpr size_t DEVICE_NAME_PLACEHOLDER_LEN = sizeof(DEVICE_NAME_PLACEHOLDER) - 1; +static constexpr const char IP_ADDRESS_PLACEHOLDER[] = "{{ip_address}}"; +static constexpr size_t IP_ADDRESS_PLACEHOLDER_LEN = sizeof(IP_ADDRESS_PLACEHOLDER) - 1; + +static void replace_all_in_place(std::string &str, const char *placeholder, size_t placeholder_len, + const std::string &replacement) { + size_t pos = 0; 
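+  // Scan the whole string, replacing every occurrence of the placeholder;
+  // advancing past each replacement keeps the newly inserted text from being rescanned.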
+ const size_t replacement_len = replacement.length(); + while ((pos = str.find(placeholder, pos)) != std::string::npos) { + str.replace(pos, placeholder_len, replacement); + pos += replacement_len; + } +} + std::string ImprovBase::get_formatted_next_url_() { if (this->next_url_.empty()) { return ""; @@ -14,28 +29,15 @@ std::string ImprovBase::get_formatted_next_url_() { std::string formatted_url = this->next_url_; // Replace all occurrences of {{device_name}} - const std::string device_name_placeholder = "{{device_name}}"; - const std::string &device_name = App.get_name(); - size_t pos = 0; - while ((pos = formatted_url.find(device_name_placeholder, pos)) != std::string::npos) { - formatted_url.replace(pos, device_name_placeholder.length(), device_name); - pos += device_name.length(); - } + replace_all_in_place(formatted_url, DEVICE_NAME_PLACEHOLDER, DEVICE_NAME_PLACEHOLDER_LEN, App.get_name()); // Replace all occurrences of {{ip_address}} - const std::string ip_address_placeholder = "{{ip_address}}"; - std::string ip_address_str; for (auto &ip : network::get_ip_addresses()) { if (ip.is_ip4()) { - ip_address_str = ip.str(); + replace_all_in_place(formatted_url, IP_ADDRESS_PLACEHOLDER, IP_ADDRESS_PLACEHOLDER_LEN, ip.str()); break; } } - pos = 0; - while ((pos = formatted_url.find(ip_address_placeholder, pos)) != std::string::npos) { - formatted_url.replace(pos, ip_address_placeholder.length(), ip_address_str); - pos += ip_address_str.length(); - } // Note: {{esphome_version}} is replaced at code generation time in Python From 97d91fee854a24f2a679ee4cce7f6e2da1f910e6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Oct 2025 11:10:33 -1000 Subject: [PATCH 203/336] Bump pylint from 4.0.1 to 4.0.2 (#11418) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements_test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements_test.txt b/requirements_test.txt index 4c60a31d7f..5f94329e3f 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -1,4 +1,4 @@ -pylint==4.0.1 +pylint==4.0.2 flake8==7.3.0 # also change in .pre-commit-config.yaml when updating ruff==0.14.1 # also change in .pre-commit-config.yaml when updating pyupgrade==3.21.0 # also change in .pre-commit-config.yaml when updating From 426511e78d89fe5db5a19888f61a4739a8373693 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Oct 2025 11:11:15 -1000 Subject: [PATCH 204/336] Bump actions/download-artifact from 4.3.0 to 5.0.0 (#11419) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6f96f2ac14..0bfbfde527 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -764,13 +764,13 @@ jobs: python-version: ${{ env.DEFAULT_PYTHON }} cache-key: ${{ needs.common.outputs.cache-key }} - name: Download target analysis JSON - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 with: name: memory-analysis-target path: ./memory-analysis continue-on-error: true - name: Download PR analysis JSON - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + uses: 
actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 with: name: memory-analysis-pr path: ./memory-analysis From 6fbd0e3385a7ee350950397dd863c7b01061f3ef Mon Sep 17 00:00:00 2001 From: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com> Date: Mon, 20 Oct 2025 17:12:07 -0400 Subject: [PATCH 205/336] [esp32_hosted] Bump esp hosted (#11414) --- esphome/components/esp32_hosted/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/esp32_hosted/__init__.py b/esphome/components/esp32_hosted/__init__.py index 7e9f1b05b5..fde75517eb 100644 --- a/esphome/components/esp32_hosted/__init__.py +++ b/esphome/components/esp32_hosted/__init__.py @@ -95,7 +95,7 @@ async def to_code(config): if framework_ver >= cv.Version(5, 5, 0): esp32.add_idf_component(name="espressif/esp_wifi_remote", ref="1.1.5") esp32.add_idf_component(name="espressif/eppp_link", ref="1.1.3") - esp32.add_idf_component(name="espressif/esp_hosted", ref="2.5.11") + esp32.add_idf_component(name="espressif/esp_hosted", ref="2.6.1") else: esp32.add_idf_component(name="espressif/esp_wifi_remote", ref="0.13.0") esp32.add_idf_component(name="espressif/eppp_link", ref="0.2.0") From ffb0e854b6a2d6f7be371104197bc3e453b4c7c6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 11:24:46 -1000 Subject: [PATCH 206/336] [ci] Optimize clang-tidy for small PRs by avoiding unnecessary job spitting (#11402) Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .github/workflows/ci.yml | 186 +++++++++++++++++++++++++--- script/determine-jobs.py | 32 +++++ tests/script/test_determine_jobs.py | 48 +++++++ 3 files changed, 248 insertions(+), 18 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0bfbfde527..f085aedcc0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -170,11 +170,13 @@ jobs: outputs: integration-tests: ${{ steps.determine.outputs.integration-tests }} clang-tidy: ${{ steps.determine.outputs.clang-tidy }} + clang-tidy-mode: ${{ steps.determine.outputs.clang-tidy-mode }} python-linters: ${{ steps.determine.outputs.python-linters }} changed-components: ${{ steps.determine.outputs.changed-components }} changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }} directly-changed-components-with-tests: ${{ steps.determine.outputs.directly-changed-components-with-tests }} component-test-count: ${{ steps.determine.outputs.component-test-count }} + changed-cpp-file-count: ${{ steps.determine.outputs.changed-cpp-file-count }} memory_impact: ${{ steps.determine.outputs.memory-impact }} steps: - name: Check out code from GitHub @@ -200,11 +202,13 @@ jobs: # Extract individual fields echo "integration-tests=$(echo "$output" | jq -r '.integration_tests')" >> $GITHUB_OUTPUT echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT + echo "clang-tidy-mode=$(echo "$output" | jq -r '.clang_tidy_mode')" >> $GITHUB_OUTPUT echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT echo "directly-changed-components-with-tests=$(echo "$output" | jq -c '.directly_changed_components_with_tests')" >> $GITHUB_OUTPUT echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT + echo 
"changed-cpp-file-count=$(echo "$output" | jq -r '.changed_cpp_file_count')" >> $GITHUB_OUTPUT echo "memory-impact=$(echo "$output" | jq -c '.memory_impact')" >> $GITHUB_OUTPUT integration-tests: @@ -243,7 +247,7 @@ jobs: . venv/bin/activate pytest -vv --no-cov --tb=native -n auto tests/integration/ - clang-tidy: + clang-tidy-single: name: ${{ matrix.name }} runs-on: ubuntu-24.04 needs: @@ -261,22 +265,6 @@ jobs: name: Run script/clang-tidy for ESP8266 options: --environment esp8266-arduino-tidy --grep USE_ESP8266 pio_cache_key: tidyesp8266 - - id: clang-tidy - name: Run script/clang-tidy for ESP32 Arduino 1/4 - options: --environment esp32-arduino-tidy --split-num 4 --split-at 1 - pio_cache_key: tidyesp32 - - id: clang-tidy - name: Run script/clang-tidy for ESP32 Arduino 2/4 - options: --environment esp32-arduino-tidy --split-num 4 --split-at 2 - pio_cache_key: tidyesp32 - - id: clang-tidy - name: Run script/clang-tidy for ESP32 Arduino 3/4 - options: --environment esp32-arduino-tidy --split-num 4 --split-at 3 - pio_cache_key: tidyesp32 - - id: clang-tidy - name: Run script/clang-tidy for ESP32 Arduino 4/4 - options: --environment esp32-arduino-tidy --split-num 4 --split-at 4 - pio_cache_key: tidyesp32 - id: clang-tidy name: Run script/clang-tidy for ESP32 IDF options: --environment esp32-idf-tidy --grep USE_ESP_IDF @@ -357,6 +345,166 @@ jobs: # yamllint disable-line rule:line-length if: always() + clang-tidy-nosplit: + name: Run script/clang-tidy for ESP32 Arduino + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + if: needs.determine-jobs.outputs.clang-tidy-mode == 'nosplit' + env: + GH_TOKEN: ${{ github.token }} + steps: + - name: Check out code from GitHub + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + # Need history for HEAD~1 to work for checking changed files + fetch-depth: 2 + + - name: Restore Python + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + + - name: Cache platformio + if: github.ref == 'refs/heads/dev' + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }} + + - name: Cache platformio + if: github.ref != 'refs/heads/dev' + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }} + + - name: Register problem matchers + run: | + echo "::add-matcher::.github/workflows/matchers/gcc.json" + echo "::add-matcher::.github/workflows/matchers/clang-tidy.json" + + - name: Check if full clang-tidy scan needed + id: check_full_scan + run: | + . venv/bin/activate + if python script/clang_tidy_hash.py --check; then + echo "full_scan=true" >> $GITHUB_OUTPUT + echo "reason=hash_changed" >> $GITHUB_OUTPUT + else + echo "full_scan=false" >> $GITHUB_OUTPUT + echo "reason=normal" >> $GITHUB_OUTPUT + fi + + - name: Run clang-tidy + run: | + . 
venv/bin/activate + if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then + echo "Running FULL clang-tidy scan (hash changed)" + script/clang-tidy --all-headers --fix --environment esp32-arduino-tidy + else + echo "Running clang-tidy on changed files only" + script/clang-tidy --all-headers --fix --changed --environment esp32-arduino-tidy + fi + env: + # Also cache libdeps, store them in a ~/.platformio subfolder + PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps + + - name: Suggested changes + run: script/ci-suggest-changes + if: always() + + clang-tidy-split: + name: ${{ matrix.name }} + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + if: needs.determine-jobs.outputs.clang-tidy-mode == 'split' + env: + GH_TOKEN: ${{ github.token }} + strategy: + fail-fast: false + max-parallel: 1 + matrix: + include: + - id: clang-tidy + name: Run script/clang-tidy for ESP32 Arduino 1/4 + options: --environment esp32-arduino-tidy --split-num 4 --split-at 1 + - id: clang-tidy + name: Run script/clang-tidy for ESP32 Arduino 2/4 + options: --environment esp32-arduino-tidy --split-num 4 --split-at 2 + - id: clang-tidy + name: Run script/clang-tidy for ESP32 Arduino 3/4 + options: --environment esp32-arduino-tidy --split-num 4 --split-at 3 + - id: clang-tidy + name: Run script/clang-tidy for ESP32 Arduino 4/4 + options: --environment esp32-arduino-tidy --split-num 4 --split-at 4 + + steps: + - name: Check out code from GitHub + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + # Need history for HEAD~1 to work for checking changed files + fetch-depth: 2 + + - name: Restore Python + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + + - name: Cache platformio + if: github.ref == 'refs/heads/dev' + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }} + + - name: Cache platformio + if: github.ref != 'refs/heads/dev' + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }} + + - name: Register problem matchers + run: | + echo "::add-matcher::.github/workflows/matchers/gcc.json" + echo "::add-matcher::.github/workflows/matchers/clang-tidy.json" + + - name: Check if full clang-tidy scan needed + id: check_full_scan + run: | + . venv/bin/activate + if python script/clang_tidy_hash.py --check; then + echo "full_scan=true" >> $GITHUB_OUTPUT + echo "reason=hash_changed" >> $GITHUB_OUTPUT + else + echo "full_scan=false" >> $GITHUB_OUTPUT + echo "reason=normal" >> $GITHUB_OUTPUT + fi + + - name: Run clang-tidy + run: | + . 
venv/bin/activate + if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then + echo "Running FULL clang-tidy scan (hash changed)" + script/clang-tidy --all-headers --fix ${{ matrix.options }} + else + echo "Running clang-tidy on changed files only" + script/clang-tidy --all-headers --fix --changed ${{ matrix.options }} + fi + env: + # Also cache libdeps, store them in a ~/.platformio subfolder + PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps + + - name: Suggested changes + run: script/ci-suggest-changes + if: always() + test-build-components-splitter: name: Split components for intelligent grouping (40 weighted per batch) runs-on: ubuntu-24.04 @@ -797,7 +945,9 @@ jobs: - pylint - pytest - integration-tests - - clang-tidy + - clang-tidy-single + - clang-tidy-nosplit + - clang-tidy-split - determine-jobs - test-build-components-splitter - test-build-components-split diff --git a/script/determine-jobs.py b/script/determine-jobs.py index a0e04a256e..c9aebd2cb7 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -61,6 +61,11 @@ from helpers import ( root_path, ) +# Threshold for splitting clang-tidy jobs +# For small PRs (< 65 files), use nosplit for faster CI +# For large PRs (>= 65 files), use split for better parallelization +CLANG_TIDY_SPLIT_THRESHOLD = 65 + class Platform(StrEnum): """Platform identifiers for memory impact analysis.""" @@ -210,6 +215,22 @@ def should_run_clang_tidy(branch: str | None = None) -> bool: return _any_changed_file_endswith(branch, CPP_FILE_EXTENSIONS) +def count_changed_cpp_files(branch: str | None = None) -> int: + """Count the number of changed C++ files. + + This is used to determine whether to split clang-tidy jobs or run them as a single job. + For PRs with < 65 changed C++ files, running a single job is faster than splitting. + + Args: + branch: Branch to compare against. If None, uses default. + + Returns: + Number of changed C++ files. + """ + files = changed_files(branch) + return sum(1 for file in files if file.endswith(CPP_FILE_EXTENSIONS)) + + def should_run_clang_format(branch: str | None = None) -> bool: """Determine if clang-format should run based on changed files. 
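As a rough standalone sketch of how this new count is meant to drive the workflow's mode selection (the helper name pick_clang_tidy_mode is illustrative and not part of the patch, the extension tuple is inferred from the test cases further down, and the threshold and mode strings come from the hunks below):

# Sketch only: mirrors the threshold logic this patch adds to main().
CLANG_TIDY_SPLIT_THRESHOLD = 65
CPP_FILE_EXTENSIONS = (".cpp", ".h", ".hpp", ".cc", ".cxx", ".c", ".tcc")  # assumed contents


def pick_clang_tidy_mode(changed: list[str], run_clang_tidy: bool) -> str:
    """Return the clang-tidy mode exported to the workflow: disabled, nosplit, or split."""
    if not run_clang_tidy:
        return "disabled"
    cpp_count = sum(1 for f in changed if f.endswith(CPP_FILE_EXTENSIONS))
    # Below the threshold a single job is cheaper than spinning up the 4-way matrix.
    return "nosplit" if cpp_count < CLANG_TIDY_SPLIT_THRESHOLD else "split"


assert pick_clang_tidy_mode(["esphome/core/helpers.cpp"], True) == "nosplit"
assert pick_clang_tidy_mode([f"file_{i}.cpp" for i in range(70)], True) == "split"
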
@@ -412,6 +433,7 @@ def main() -> None: run_clang_tidy = should_run_clang_tidy(args.branch) run_clang_format = should_run_clang_format(args.branch) run_python_linters = should_run_python_linters(args.branch) + changed_cpp_file_count = count_changed_cpp_files(args.branch) # Get both directly changed and all changed components (with dependencies) in one call script_path = Path(__file__).parent / "list-components.py" @@ -449,10 +471,19 @@ def main() -> None: # Detect components for memory impact analysis (merged config) memory_impact = detect_memory_impact_config(args.branch) + if run_clang_tidy: + if changed_cpp_file_count < CLANG_TIDY_SPLIT_THRESHOLD: + clang_tidy_mode = "nosplit" + else: + clang_tidy_mode = "split" + else: + clang_tidy_mode = "disabled" + # Build output output: dict[str, Any] = { "integration_tests": run_integration, "clang_tidy": run_clang_tidy, + "clang_tidy_mode": clang_tidy_mode, "clang_format": run_clang_format, "python_linters": run_python_linters, "changed_components": changed_components, @@ -462,6 +493,7 @@ def main() -> None: "component_test_count": len(changed_components_with_tests), "directly_changed_count": len(directly_changed_with_tests), "dependency_only_count": len(dependency_only_components), + "changed_cpp_file_count": changed_cpp_file_count, "memory_impact": memory_impact, } diff --git a/tests/script/test_determine_jobs.py b/tests/script/test_determine_jobs.py index 7587dbee69..02aaad2e3a 100644 --- a/tests/script/test_determine_jobs.py +++ b/tests/script/test_determine_jobs.py @@ -107,6 +107,7 @@ def test_main_all_tests_should_run( assert output["integration_tests"] is True assert output["clang_tidy"] is True + assert output["clang_tidy_mode"] in ["nosplit", "split"] assert output["clang_format"] is True assert output["python_linters"] is True assert output["changed_components"] == ["wifi", "api", "sensor"] @@ -117,6 +118,9 @@ def test_main_all_tests_should_run( assert output["component_test_count"] == len( output["changed_components_with_tests"] ) + # changed_cpp_file_count should be present + assert "changed_cpp_file_count" in output + assert isinstance(output["changed_cpp_file_count"], int) # memory_impact should be present assert "memory_impact" in output assert output["memory_impact"]["should_run"] == "false" # No files changed @@ -156,11 +160,14 @@ def test_main_no_tests_should_run( assert output["integration_tests"] is False assert output["clang_tidy"] is False + assert output["clang_tidy_mode"] == "disabled" assert output["clang_format"] is False assert output["python_linters"] is False assert output["changed_components"] == [] assert output["changed_components_with_tests"] == [] assert output["component_test_count"] == 0 + # changed_cpp_file_count should be 0 + assert output["changed_cpp_file_count"] == 0 # memory_impact should be present assert "memory_impact" in output assert output["memory_impact"]["should_run"] == "false" @@ -239,6 +246,7 @@ def test_main_with_branch_argument( assert output["integration_tests"] is False assert output["clang_tidy"] is True + assert output["clang_tidy_mode"] in ["nosplit", "split"] assert output["clang_format"] is False assert output["python_linters"] is True assert output["changed_components"] == ["mqtt"] @@ -249,6 +257,9 @@ def test_main_with_branch_argument( assert output["component_test_count"] == len( output["changed_components_with_tests"] ) + # changed_cpp_file_count should be present + assert "changed_cpp_file_count" in output + assert isinstance(output["changed_cpp_file_count"], int) # memory_impact 
should be present assert "memory_impact" in output assert output["memory_impact"]["should_run"] == "false" @@ -433,6 +444,40 @@ def test_should_run_clang_format_with_branch() -> None: mock_changed.assert_called_once_with("release") +@pytest.mark.parametrize( + ("changed_files", "expected_count"), + [ + (["esphome/core.cpp"], 1), + (["esphome/core.h"], 1), + (["test.hpp"], 1), + (["test.cc"], 1), + (["test.cxx"], 1), + (["test.c"], 1), + (["test.tcc"], 1), + (["esphome/core.cpp", "esphome/core.h"], 2), + (["esphome/core.cpp", "esphome/core.h", "test.cc"], 3), + (["README.md"], 0), + (["esphome/config.py"], 0), + (["README.md", "esphome/config.py"], 0), + (["esphome/core.cpp", "README.md", "esphome/config.py"], 1), + ([], 0), + ], +) +def test_count_changed_cpp_files(changed_files: list[str], expected_count: int) -> None: + """Test count_changed_cpp_files function.""" + with patch.object(determine_jobs, "changed_files", return_value=changed_files): + result = determine_jobs.count_changed_cpp_files() + assert result == expected_count + + +def test_count_changed_cpp_files_with_branch() -> None: + """Test count_changed_cpp_files with branch argument.""" + with patch.object(determine_jobs, "changed_files") as mock_changed: + mock_changed.return_value = [] + determine_jobs.count_changed_cpp_files("release") + mock_changed.assert_called_once_with("release") + + def test_main_filters_components_without_tests( mock_should_run_integration_tests: Mock, mock_should_run_clang_tidy: Mock, @@ -501,6 +546,9 @@ def test_main_filters_components_without_tests( assert set(output["changed_components_with_tests"]) == {"wifi", "sensor"} # component_test_count should be based on components with tests assert output["component_test_count"] == 2 + # changed_cpp_file_count should be present + assert "changed_cpp_file_count" in output + assert isinstance(output["changed_cpp_file_count"], int) # memory_impact should be present assert "memory_impact" in output assert output["memory_impact"]["should_run"] == "false" From 6a239f4d1c27c2dd157ea1e23c38af350cf02d66 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 11:25:33 -1000 Subject: [PATCH 207/336] [ci] Prefer platform-specific tests for memory impact analysis (#11398) --- script/determine-jobs.py | 145 +++++++++++++++++++++++++++++++++++---- 1 file changed, 130 insertions(+), 15 deletions(-) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index c9aebd2cb7..1877894fc4 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -83,11 +83,16 @@ MEMORY_IMPACT_FALLBACK_COMPONENT = "api" # Representative component for core ch MEMORY_IMPACT_FALLBACK_PLATFORM = Platform.ESP32_IDF # Most representative platform # Platform preference order for memory impact analysis -# Prefer newer platforms first as they represent the future of ESPHome -# ESP8266 is most constrained but many new features don't support it +# This order is used when no platform-specific hints are detected from filenames +# Priority rationale: +# 1. ESP32-C6 IDF - Newest platform, supports Thread/Zigbee +# 2. ESP8266 Arduino - Most memory constrained (best for detecting memory impact), +# fastest build times, most sensitive to code size changes +# 3. ESP32 IDF - Primary ESP32 platform, most representative of modern ESPHome +# 4-6. 
Other ESP32 variants - Less commonly used but still supported MEMORY_IMPACT_PLATFORM_PREFERENCE = [ Platform.ESP32_C6_IDF, # ESP32-C6 IDF (newest, supports Thread/Zigbee) - Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained - best for impact analysis) + Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained, fastest builds) Platform.ESP32_IDF, # ESP32 IDF platform (primary ESP32 platform, most representative) Platform.ESP32_C3_IDF, # ESP32-C3 IDF Platform.ESP32_S2_IDF, # ESP32-S2 IDF @@ -285,6 +290,91 @@ def _component_has_tests(component: str) -> bool: return bool(get_component_test_files(component)) +def _select_platform_by_preference( + platforms: list[Platform] | set[Platform], +) -> Platform: + """Select the most preferred platform from a list/set based on MEMORY_IMPACT_PLATFORM_PREFERENCE. + + Args: + platforms: List or set of platforms to choose from + + Returns: + The most preferred platform (earliest in MEMORY_IMPACT_PLATFORM_PREFERENCE) + """ + return min(platforms, key=MEMORY_IMPACT_PLATFORM_PREFERENCE.index) + + +def _select_platform_by_count( + platform_counts: Counter[Platform], +) -> Platform: + """Select platform by count, using MEMORY_IMPACT_PLATFORM_PREFERENCE as tiebreaker. + + Args: + platform_counts: Counter mapping platforms to their counts + + Returns: + Platform with highest count, breaking ties by preference order + """ + return min( + platform_counts.keys(), + key=lambda p: ( + -platform_counts[p], # Negative to prefer higher counts + MEMORY_IMPACT_PLATFORM_PREFERENCE.index(p), + ), + ) + + +def _detect_platform_hint_from_filename(filename: str) -> Platform | None: + """Detect platform hint from filename patterns. + + Detects platform-specific files using patterns like: + - wifi_component_esp_idf.cpp, *_idf.h -> ESP32 IDF variants + - wifi_component_esp8266.cpp, *_esp8266.h -> ESP8266_ARD + - *_esp32*.cpp -> ESP32 IDF (generic) + - *_libretiny.cpp, *_retiny.* -> LibreTiny (not in preference list) + - *_pico.cpp, *_rp2040.* -> RP2040 (not in preference list) + + Args: + filename: File path to check + + Returns: + Platform enum if a specific platform is detected, None otherwise + """ + filename_lower = filename.lower() + + # ESP-IDF platforms (check specific variants first) + if "esp_idf" in filename_lower or "_idf" in filename_lower: + # Check for specific ESP32 variants + if "c6" in filename_lower or "esp32c6" in filename_lower: + return Platform.ESP32_C6_IDF + if "c3" in filename_lower or "esp32c3" in filename_lower: + return Platform.ESP32_C3_IDF + if "s2" in filename_lower or "esp32s2" in filename_lower: + return Platform.ESP32_S2_IDF + if "s3" in filename_lower or "esp32s3" in filename_lower: + return Platform.ESP32_S3_IDF + # Default to ESP32 IDF for generic esp_idf files + return Platform.ESP32_IDF + + # ESP8266 Arduino + if "esp8266" in filename_lower: + return Platform.ESP8266_ARD + + # Generic ESP32 (without _idf suffix, could be Arduino or shared code) + # Prefer IDF as it's the modern platform + if "esp32" in filename_lower: + return Platform.ESP32_IDF + + # LibreTiny and RP2040 are not in MEMORY_IMPACT_PLATFORM_PREFERENCE + # so we don't return them as hints + # if "retiny" in filename_lower or "libretiny" in filename_lower: + # return None # No specific LibreTiny platform preference + # if "pico" in filename_lower or "rp2040" in filename_lower: + # return None # No RP2040 platform preference + + return None + + def detect_memory_impact_config( branch: str | None = None, ) -> dict[str, Any]: @@ -294,6 +384,9 @@ def 
detect_memory_impact_config( building a merged configuration with all changed components (like test_build_components.py does) to get comprehensive memory analysis. + When platform-specific files are detected (e.g., wifi_component_esp_idf.cpp), + prefers that platform for testing to ensure the most relevant memory analysis. + For core C++ file changes without component changes, runs a fallback analysis using a representative component to measure the impact. @@ -312,8 +405,10 @@ def detect_memory_impact_config( files = changed_files(branch) # Find all changed components (excluding core and base bus components) + # Also collect platform hints from platform-specific filenames changed_component_set: set[str] = set() has_core_cpp_changes = False + platform_hints: list[Platform] = [] for file in files: component = get_component_from_path(file) @@ -321,6 +416,10 @@ def detect_memory_impact_config( # Skip base bus components as they're used across many builds if component not in BASE_BUS_COMPONENTS: changed_component_set.add(component) + # Check if this is a platform-specific file + platform_hint = _detect_platform_hint_from_filename(file) + if platform_hint: + platform_hints.append(platform_hint) elif file.startswith("esphome/") and file.endswith(CPP_FILE_EXTENSIONS): # Core ESPHome C++ files changed (not component-specific) # Only C++ files affect memory usage @@ -377,27 +476,42 @@ def detect_memory_impact_config( common_platforms &= platforms # Select the most preferred platform from the common set - # Exception: for core changes, use fallback platform (most representative of codebase) - if force_fallback_platform: + # Priority order: + # 1. Platform hints from filenames (e.g., wifi_component_esp_idf.cpp suggests ESP32_IDF) + # 2. Core changes use fallback platform (most representative of codebase) + # 3. Common platforms supported by all components + # 4. 
Most commonly supported platform + if platform_hints: + # Use most common platform hint that's also supported by all components + hint_counts = Counter(platform_hints) + # Filter to only hints that are in common_platforms (if any common platforms exist) + valid_hints = ( + [h for h in hint_counts if h in common_platforms] + if common_platforms + else list(hint_counts.keys()) + ) + if valid_hints: + platform = _select_platform_by_count( + Counter({p: hint_counts[p] for p in valid_hints}) + ) + elif common_platforms: + # Hints exist but none match common platforms, use common platform logic + platform = _select_platform_by_preference(common_platforms) + else: + # Use the most common hint even if it's not in common platforms + platform = _select_platform_by_count(hint_counts) + elif force_fallback_platform: platform = MEMORY_IMPACT_FALLBACK_PLATFORM elif common_platforms: # Pick the most preferred platform that all components support - platform = min(common_platforms, key=MEMORY_IMPACT_PLATFORM_PREFERENCE.index) + platform = _select_platform_by_preference(common_platforms) else: # No common platform - pick the most commonly supported platform - # This allows testing components individually even if they can't be merged # Count how many components support each platform platform_counts = Counter( p for platforms in component_platforms_map.values() for p in platforms ) - # Pick the platform supported by most components, preferring earlier in MEMORY_IMPACT_PLATFORM_PREFERENCE - platform = max( - platform_counts.keys(), - key=lambda p: ( - platform_counts[p], - -MEMORY_IMPACT_PLATFORM_PREFERENCE.index(p), - ), - ) + platform = _select_platform_by_count(platform_counts) # Debug output print("Memory impact analysis:", file=sys.stderr) @@ -407,6 +521,7 @@ def detect_memory_impact_config( f" Component platforms: {dict(sorted(component_platforms_map.items()))}", file=sys.stderr, ) + print(f" Platform hints from filenames: {platform_hints}", file=sys.stderr) print(f" Common platforms: {sorted(common_platforms)}", file=sys.stderr) print(f" Selected platform: {platform}", file=sys.stderr) From c34a57df7b65c151f761e7afed5d042cb0f75d3f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 12:48:27 -1000 Subject: [PATCH 208/336] text_sensor filters --- esphome/components/text_sensor/__init__.py | 25 +++++-- esphome/components/text_sensor/filter.cpp | 27 ++++++-- esphome/components/text_sensor/filter.h | 44 +++++++++---- tests/components/text_sensor/common.yaml | 66 +++++++++++++++++++ .../text_sensor/test.esp8266-ard.yaml | 1 + 5 files changed, 140 insertions(+), 23 deletions(-) create mode 100644 tests/components/text_sensor/common.yaml create mode 100644 tests/components/text_sensor/test.esp8266-ard.yaml diff --git a/esphome/components/text_sensor/__init__.py b/esphome/components/text_sensor/__init__.py index f7b3b5c55e..7a9e947abd 100644 --- a/esphome/components/text_sensor/__init__.py +++ b/esphome/components/text_sensor/__init__.py @@ -110,17 +110,28 @@ def validate_mapping(value): "substitute", SubstituteFilter, cv.ensure_list(validate_mapping) ) async def substitute_filter_to_code(config, filter_id): - from_strings = [conf[CONF_FROM] for conf in config] - to_strings = [conf[CONF_TO] for conf in config] - return cg.new_Pvariable(filter_id, from_strings, to_strings) + substitutions = [ + cg.StructInitializer( + cg.MockObj("Substitution", "esphome::text_sensor::"), + ("from", conf[CONF_FROM]), + ("to", conf[CONF_TO]), + ) + for conf in config + ] + return cg.new_Pvariable(filter_id, substitutions) @FILTER_REGISTRY.register("map", MapFilter, cv.ensure_list(validate_mapping)) async def map_filter_to_code(config, filter_id): - map_ = cg.std_ns.class_("map").template(cg.std_string, cg.std_string) - return cg.new_Pvariable( - filter_id, map_([(item[CONF_FROM], item[CONF_TO]) for item in config]) - ) + mappings = [ + cg.StructInitializer( + cg.MockObj("Substitution", "esphome::text_sensor::"), + ("from", conf[CONF_FROM]), + ("to", conf[CONF_TO]), + ) + for conf in config + ] + return cg.new_Pvariable(filter_id, mappings) validate_device_class = cv.one_of(*DEVICE_CLASSES, lower=True, space="_") diff --git a/esphome/components/text_sensor/filter.cpp b/esphome/components/text_sensor/filter.cpp index 80edae2b6c..92cf8fdb9b 100644 --- a/esphome/components/text_sensor/filter.cpp +++ b/esphome/components/text_sensor/filter.cpp @@ -62,19 +62,36 @@ optional AppendFilter::new_value(std::string value) { return value optional PrependFilter::new_value(std::string value) { return this->prefix_ + value; } // Substitute +SubstituteFilter::SubstituteFilter(std::initializer_list substitutions) { + this->substitutions_.init(substitutions.size()); + for (auto &sub : substitutions) { + this->substitutions_.push_back(std::move(sub)); + } +} + optional SubstituteFilter::new_value(std::string value) { std::size_t pos; - for (size_t i = 0; i < this->from_strings_.size(); i++) { - while ((pos = value.find(this->from_strings_[i])) != std::string::npos) - value.replace(pos, this->from_strings_[i].size(), this->to_strings_[i]); + for (const auto &sub : this->substitutions_) { + while ((pos = value.find(sub.from)) != std::string::npos) + value.replace(pos, sub.from.size(), sub.to); } return value; } // Map +MapFilter::MapFilter(std::initializer_list mappings) { + this->mappings_.init(mappings.size()); + for (auto &mapping : mappings) { + this->mappings_.push_back(std::move(mapping)); + } +} + optional MapFilter::new_value(std::string value) { - auto item = mappings_.find(value); - return item == mappings_.end() ? 
value : item->second; + for (const auto &mapping : this->mappings_) { + if (mapping.from == value) + return mapping.to; + } + return value; // Pass through if no match } } // namespace text_sensor diff --git a/esphome/components/text_sensor/filter.h b/esphome/components/text_sensor/filter.h index 2de9010b88..fcb1c4b347 100644 --- a/esphome/components/text_sensor/filter.h +++ b/esphome/components/text_sensor/filter.h @@ -2,10 +2,6 @@ #include "esphome/core/component.h" #include "esphome/core/helpers.h" -#include -#include -#include -#include namespace esphome { namespace text_sensor { @@ -98,26 +94,52 @@ class PrependFilter : public Filter { std::string prefix_; }; +struct Substitution { + std::string from; + std::string to; +}; + /// A simple filter that replaces a substring with another substring class SubstituteFilter : public Filter { public: - SubstituteFilter(std::vector from_strings, std::vector to_strings) - : from_strings_(std::move(from_strings)), to_strings_(std::move(to_strings)) {} + explicit SubstituteFilter(std::initializer_list substitutions); optional new_value(std::string value) override; protected: - std::vector from_strings_; - std::vector to_strings_; + FixedVector substitutions_; }; -/// A filter that maps values from one set to another +/** A filter that maps values from one set to another + * + * Uses linear search instead of std::map for typical small datasets (2-20 mappings). + * Linear search on contiguous memory is faster than red-black tree lookups when: + * - Dataset is small (< ~30 items) + * - Memory is contiguous (cache-friendly, better CPU cache utilization) + * - No pointer chasing overhead (tree node traversal) + * - String comparison cost dominates lookup time + * + * Benchmark results (see benchmark_map_filter.cpp): + * - 2 mappings: Linear 1.26x faster than std::map + * - 5 mappings: Linear 2.25x faster than std::map + * - 10 mappings: Linear 1.83x faster than std::map + * - 20 mappings: Linear 1.59x faster than std::map + * - 30 mappings: Linear 1.09x faster than std::map + * - 40 mappings: std::map 1.27x faster than Linear (break-even) + * + * Benefits over std::map: + * - ~2KB smaller flash (no red-black tree code) + * - ~24-32 bytes less RAM per mapping (no tree node overhead) + * - Faster for typical ESPHome usage (2-10 mappings common, 20+ rare) + * + * Break-even point: ~35-40 mappings, but ESPHome configs rarely exceed 20 + */ class MapFilter : public Filter { public: - MapFilter(std::map mappings) : mappings_(std::move(mappings)) {} + explicit MapFilter(std::initializer_list mappings); optional new_value(std::string value) override; protected: - std::map mappings_; + FixedVector mappings_; }; } // namespace text_sensor diff --git a/tests/components/text_sensor/common.yaml b/tests/components/text_sensor/common.yaml new file mode 100644 index 0000000000..4459c0fa44 --- /dev/null +++ b/tests/components/text_sensor/common.yaml @@ -0,0 +1,66 @@ +text_sensor: + - platform: template + name: "Test Substitute Single" + id: test_substitute_single + filters: + - substitute: + - ERROR -> Error + + - platform: template + name: "Test Substitute Multiple" + id: test_substitute_multiple + filters: + - substitute: + - ERROR -> Error + - WARN -> Warning + - INFO -> Information + - DEBUG -> Debug + + - platform: template + name: "Test Substitute Chained" + id: test_substitute_chained + filters: + - substitute: + - foo -> bar + - to_upper + - substitute: + - BAR -> baz + + - platform: template + name: "Test Map Single" + id: test_map_single + filters: + - map: + 
- ON -> Active + + - platform: template + name: "Test Map Multiple" + id: test_map_multiple + filters: + - map: + - ON -> Active + - OFF -> Inactive + - UNKNOWN -> Error + - IDLE -> Standby + + - platform: template + name: "Test Map Passthrough" + id: test_map_passthrough + filters: + - map: + - Good -> Excellent + - Bad -> Poor + + - platform: template + name: "Test All Filters" + id: test_all_filters + filters: + - to_upper + - to_lower + - append: " suffix" + - prepend: "prefix " + - substitute: + - prefix -> PREFIX + - suffix -> SUFFIX + - map: + - PREFIX text SUFFIX -> mapped diff --git a/tests/components/text_sensor/test.esp8266-ard.yaml b/tests/components/text_sensor/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/text_sensor/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml From d13b50077f4fc14e7c143e16fe8f84f6f2881c11 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 13:00:15 -1000 Subject: [PATCH 209/336] Add basic text_sensor tests --- tests/components/text_sensor/common.yaml | 66 +++++++++++++++++++ .../text_sensor/test.esp8266-ard.yaml | 1 + 2 files changed, 67 insertions(+) create mode 100644 tests/components/text_sensor/common.yaml create mode 100644 tests/components/text_sensor/test.esp8266-ard.yaml diff --git a/tests/components/text_sensor/common.yaml b/tests/components/text_sensor/common.yaml new file mode 100644 index 0000000000..4459c0fa44 --- /dev/null +++ b/tests/components/text_sensor/common.yaml @@ -0,0 +1,66 @@ +text_sensor: + - platform: template + name: "Test Substitute Single" + id: test_substitute_single + filters: + - substitute: + - ERROR -> Error + + - platform: template + name: "Test Substitute Multiple" + id: test_substitute_multiple + filters: + - substitute: + - ERROR -> Error + - WARN -> Warning + - INFO -> Information + - DEBUG -> Debug + + - platform: template + name: "Test Substitute Chained" + id: test_substitute_chained + filters: + - substitute: + - foo -> bar + - to_upper + - substitute: + - BAR -> baz + + - platform: template + name: "Test Map Single" + id: test_map_single + filters: + - map: + - ON -> Active + + - platform: template + name: "Test Map Multiple" + id: test_map_multiple + filters: + - map: + - ON -> Active + - OFF -> Inactive + - UNKNOWN -> Error + - IDLE -> Standby + + - platform: template + name: "Test Map Passthrough" + id: test_map_passthrough + filters: + - map: + - Good -> Excellent + - Bad -> Poor + + - platform: template + name: "Test All Filters" + id: test_all_filters + filters: + - to_upper + - to_lower + - append: " suffix" + - prepend: "prefix " + - substitute: + - prefix -> PREFIX + - suffix -> SUFFIX + - map: + - PREFIX text SUFFIX -> mapped diff --git a/tests/components/text_sensor/test.esp8266-ard.yaml b/tests/components/text_sensor/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/text_sensor/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml From 6c8c049c088dc18480f54876a402779758474b5c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 13:27:36 -1000 Subject: [PATCH 210/336] dry --- esphome/components/text_sensor/filter.cpp | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/esphome/components/text_sensor/filter.cpp b/esphome/components/text_sensor/filter.cpp index 92cf8fdb9b..22d8b38632 100644 --- a/esphome/components/text_sensor/filter.cpp +++ b/esphome/components/text_sensor/filter.cpp @@ -62,12 +62,7 @@ optional AppendFilter::new_value(std::string value) { return value optional PrependFilter::new_value(std::string value) { return this->prefix_ + value; } // Substitute -SubstituteFilter::SubstituteFilter(std::initializer_list substitutions) { - this->substitutions_.init(substitutions.size()); - for (auto &sub : substitutions) { - this->substitutions_.push_back(std::move(sub)); - } -} +SubstituteFilter::SubstituteFilter(std::initializer_list substitutions) : substitutions_(substitutions) {} optional SubstituteFilter::new_value(std::string value) { std::size_t pos; @@ -79,12 +74,7 @@ optional SubstituteFilter::new_value(std::string value) { } // Map -MapFilter::MapFilter(std::initializer_list mappings) { - this->mappings_.init(mappings.size()); - for (auto &mapping : mappings) { - this->mappings_.push_back(std::move(mapping)); - } -} +MapFilter::MapFilter(std::initializer_list mappings) : mappings_(mappings) {} optional MapFilter::new_value(std::string value) { for (const auto &mapping : this->mappings_) { From b698b458098f17315edcb587c9d40e2dac7b148e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 14:11:49 -1000 Subject: [PATCH 211/336] [sensor,text_sensor,binary_sensor] Optimize filter parameters with std::initializer_list --- esphome/components/binary_sensor/binary_sensor.cpp | 2 +- esphome/components/binary_sensor/binary_sensor.h | 4 ++-- esphome/components/sensor/sensor.cpp | 4 ++-- esphome/components/sensor/sensor.h | 6 +++--- esphome/components/text_sensor/text_sensor.cpp | 4 ++-- esphome/components/text_sensor/text_sensor.h | 6 +++--- 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/esphome/components/binary_sensor/binary_sensor.cpp b/esphome/components/binary_sensor/binary_sensor.cpp index 39319d3c1c..33b3de6d72 100644 --- a/esphome/components/binary_sensor/binary_sensor.cpp +++ b/esphome/components/binary_sensor/binary_sensor.cpp @@ -51,7 +51,7 @@ void BinarySensor::add_filter(Filter *filter) { last_filter->next_ = filter; } } -void BinarySensor::add_filters(const std::vector &filters) { +void BinarySensor::add_filters(std::initializer_list filters) { for (Filter *filter : filters) { this->add_filter(filter); } diff --git a/esphome/components/binary_sensor/binary_sensor.h b/esphome/components/binary_sensor/binary_sensor.h index 2bd17d97c9..c1661d710f 100644 --- a/esphome/components/binary_sensor/binary_sensor.h +++ b/esphome/components/binary_sensor/binary_sensor.h @@ -4,7 +4,7 @@ #include "esphome/core/helpers.h" #include "esphome/components/binary_sensor/filter.h" -#include +#include namespace esphome { @@ -48,7 +48,7 @@ class BinarySensor : public StatefulEntityBase, public EntityBase_DeviceCl void publish_initial_state(bool new_state); void add_filter(Filter *filter); - void add_filters(const std::vector &filters); + void add_filters(std::initializer_list filters); // ========== INTERNAL METHODS ========== // (In most use cases you won't need these) diff --git a/esphome/components/sensor/sensor.cpp b/esphome/components/sensor/sensor.cpp index 4292b8c0bc..92da4345b7 100644 --- 
a/esphome/components/sensor/sensor.cpp +++ b/esphome/components/sensor/sensor.cpp @@ -107,12 +107,12 @@ void Sensor::add_filter(Filter *filter) { } filter->initialize(this, nullptr); } -void Sensor::add_filters(const std::vector &filters) { +void Sensor::add_filters(std::initializer_list filters) { for (Filter *filter : filters) { this->add_filter(filter); } } -void Sensor::set_filters(const std::vector &filters) { +void Sensor::set_filters(std::initializer_list filters) { this->clear_filters(); this->add_filters(filters); } diff --git a/esphome/components/sensor/sensor.h b/esphome/components/sensor/sensor.h index f3fa601a5e..a4210e5e6c 100644 --- a/esphome/components/sensor/sensor.h +++ b/esphome/components/sensor/sensor.h @@ -6,7 +6,7 @@ #include "esphome/core/log.h" #include "esphome/components/sensor/filter.h" -#include +#include #include namespace esphome { @@ -77,10 +77,10 @@ class Sensor : public EntityBase, public EntityBase_DeviceClass, public EntityBa * SlidingWindowMovingAverageFilter(15, 15), // average over last 15 values * }); */ - void add_filters(const std::vector &filters); + void add_filters(std::initializer_list filters); /// Clear the filters and replace them by filters. - void set_filters(const std::vector &filters); + void set_filters(std::initializer_list filters); /// Clear the entire filter chain. void clear_filters(); diff --git a/esphome/components/text_sensor/text_sensor.cpp b/esphome/components/text_sensor/text_sensor.cpp index 17bf20466e..0294d65861 100644 --- a/esphome/components/text_sensor/text_sensor.cpp +++ b/esphome/components/text_sensor/text_sensor.cpp @@ -51,12 +51,12 @@ void TextSensor::add_filter(Filter *filter) { } filter->initialize(this, nullptr); } -void TextSensor::add_filters(const std::vector &filters) { +void TextSensor::add_filters(std::initializer_list filters) { for (Filter *filter : filters) { this->add_filter(filter); } } -void TextSensor::set_filters(const std::vector &filters) { +void TextSensor::set_filters(std::initializer_list filters) { this->clear_filters(); this->add_filters(filters); } diff --git a/esphome/components/text_sensor/text_sensor.h b/esphome/components/text_sensor/text_sensor.h index abbea27b59..db2e857ae3 100644 --- a/esphome/components/text_sensor/text_sensor.h +++ b/esphome/components/text_sensor/text_sensor.h @@ -5,7 +5,7 @@ #include "esphome/core/helpers.h" #include "esphome/components/text_sensor/filter.h" -#include +#include #include namespace esphome { @@ -37,10 +37,10 @@ class TextSensor : public EntityBase, public EntityBase_DeviceClass { void add_filter(Filter *filter); /// Add a list of vectors to the back of the filter chain. - void add_filters(const std::vector &filters); + void add_filters(std::initializer_list filters); /// Clear the filters and replace them by filters. - void set_filters(const std::vector &filters); + void set_filters(std::initializer_list filters); /// Clear the entire filter chain. void clear_filters(); From 3847989c0f5d4e72673b2f7ce8d54515acd88d68 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 15:10:49 -1000 Subject: [PATCH 212/336] wip --- esphome/components/esp8266/__init__.py | 6 +- esphome/components/esp8266/iram_fix.py.script | 133 +++++++++++++++--- .../build_components_base.esp8266-ard.yaml | 6 +- 3 files changed, 120 insertions(+), 25 deletions(-) diff --git a/esphome/components/esp8266/__init__.py b/esphome/components/esp8266/__init__.py index 9d8e6b7d1e..8eab9946f2 100644 --- a/esphome/components/esp8266/__init__.py +++ b/esphome/components/esp8266/__init__.py @@ -230,9 +230,9 @@ async def to_code(config): # For cases where nullptrs can be handled, use nothrow: `new (std::nothrow) T;` cg.add_build_flag("-DNEW_OOM_ABORT") - # In testing mode, fake a larger IRAM to allow linking grouped component tests - # Real ESP8266 hardware only has 32KB IRAM, but for CI testing we pretend it has 2MB - # This is done via a pre-build script that generates a custom linker script + # In testing mode, fake larger memory to allow linking grouped component tests + # Real ESP8266 hardware only has 32KB IRAM and ~80KB RAM, but for CI testing + # we pretend it has much larger memory to test that components compile together if CORE.testing_mode: cg.add_build_flag("-DESPHOME_TESTING_MODE") diff --git a/esphome/components/esp8266/iram_fix.py.script b/esphome/components/esp8266/iram_fix.py.script index 96bddc2ced..d6c4170a18 100644 --- a/esphome/components/esp8266/iram_fix.py.script +++ b/esphome/components/esp8266/iram_fix.py.script @@ -5,8 +5,108 @@ import re Import("env") # noqa +def apply_memory_patches(content): + """Apply IRAM, DRAM, and Flash patches to linker script content. + + Args: + content: Linker script content as string + + Returns: + Patched content as string + """ + patches_applied = [] + + # Replace IRAM size from 0x8000 (32KB) to 0x200000 (2MB) + # The line looks like: iram1_0_seg : org = 0x40100000, len = 0x8000 + new_content = re.sub( + r"(iram1_0_seg\s*:\s*org\s*=\s*0x40100000\s*,\s*len\s*=\s*)0x8000", + r"\g<1>0x200000", + content, + ) + if new_content != content: + patches_applied.append("IRAM: 32KB -> 2MB") + content = new_content + + # Replace DRAM (BSS) size to allow larger uninitialized data sections + # The line looks like: dram0_0_seg : org = 0x3FFE8000, len = 0x14000 + # Increase from 0x14000 (80KB) to 0x200000 (2MB) + new_content = re.sub( + r"(dram0_0_seg\s*:\s*org\s*=\s*0x3FFE8000\s*,\s*len\s*=\s*)0x14000", + r"\g<1>0x200000", + content, + ) + if new_content != content: + patches_applied.append("DRAM: 80KB -> 2MB") + content = new_content + + # Replace Flash/irom0 size to allow larger code sections + # The line looks like: irom0_0_seg : org = 0x40201010, len = 0xfeff0 + # Increase from 0xfeff0 (~1MB) to 0x2000000 (32MB) - fake huge flash for testing + new_content = re.sub( + r"(irom0_0_seg\s*:\s*org\s*=\s*0x40201010\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+", + r"\g<1>0x2000000", + content, + ) + if new_content != content: + patches_applied.append("Flash: 1MB -> 32MB") + content = new_content + + if patches_applied: + print(f" Patches applied: {', '.join(patches_applied)}") + + return content + + +def patch_linker_script_file(filepath, description): + """Patch a single linker script file in place.""" + if not os.path.exists(filepath): + print(f"ESPHome: {description} not found at {filepath}") + return False + + print(f"ESPHome: Patching {description}...") + with open(filepath, "r") as f: + content = f.read() + + patched_content = apply_memory_patches(content) + + if patched_content != content: + with open(filepath, "w") as f: + 
f.write(patched_content) + print(f"ESPHome: Successfully patched {description}") + return True + else: + print(f"ESPHome: {description} already patched or no changes needed") + return False + + +def patch_sdk_linker_script_immediately(env): + """Patch SDK linker scripts immediately when script loads. + + This must happen BEFORE PlatformIO's builder calculates sizes. + """ + # Get the SDK linker script path + ldscript = env.GetProjectOption("board_build.ldscript", "") + if not ldscript: + return + + # Get the framework directory + framework_dir = env.PioPlatform().get_package_dir("framework-arduinoespressif8266") + if not framework_dir: + return + + # Patch the main SDK linker script (flash layout) + sdk_ld = os.path.join(framework_dir, "tools", "sdk", "ld", ldscript) + if os.path.exists(sdk_ld): + patch_linker_script_file(sdk_ld, f"SDK {ldscript}") + + # Also patch the local.eagle.app.v6.common.ld in SDK (contains IRAM and DRAM) + local_common = os.path.join(framework_dir, "tools", "sdk", "ld", "local.eagle.app.v6.common.ld") + if os.path.exists(local_common): + patch_linker_script_file(local_common, "SDK local.eagle.app.v6.common.ld") + + def patch_linker_script_after_preprocess(source, target, env): - """Patch the local linker script after PlatformIO preprocesses it.""" + """Patch linker scripts after PlatformIO preprocesses them.""" # Check if we're in testing mode by looking for the define build_flags = env.get("BUILD_FLAGS", []) testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) @@ -14,29 +114,20 @@ def patch_linker_script_after_preprocess(source, target, env): if not testing_mode: return - # Get the local linker script path - build_dir = env.subst("$BUILD_DIR") - local_ld = os.path.join(build_dir, "ld", "local.eagle.app.v6.common.ld") + # Patch SDK linker scripts first (for size calculation) + patch_sdk_linker_script_immediately(env) - if not os.path.exists(local_ld): + # Patch build directory scripts + build_dir = env.subst("$BUILD_DIR") + ld_dir = os.path.join(build_dir, "ld") + + if not os.path.exists(ld_dir): return - # Read the linker script - with open(local_ld, "r") as f: - content = f.read() - - # Replace IRAM size from 0x8000 (32KB) to 0x200000 (2MB) - # The line looks like: iram1_0_seg : org = 0x40100000, len = 0x8000 - updated = re.sub( - r"(iram1_0_seg\s*:\s*org\s*=\s*0x40100000\s*,\s*len\s*=\s*)0x8000", - r"\g<1>0x200000", - content, - ) - - if updated != content: - with open(local_ld, "w") as f: - f.write(updated) - print("ESPHome: Patched IRAM size to 2MB for testing mode") + # Patch the local linker script (contains IRAM and DRAM definitions) + local_ld = os.path.join(ld_dir, "local.eagle.app.v6.common.ld") + if os.path.exists(local_ld): + patch_linker_script_file(local_ld, "build local.eagle.app.v6.common.ld") # Hook into the build process right before linking diff --git a/tests/test_build_components/build_components_base.esp8266-ard.yaml b/tests/test_build_components/build_components_base.esp8266-ard.yaml index e4d6607c86..8e2a5461f3 100644 --- a/tests/test_build_components/build_components_base.esp8266-ard.yaml +++ b/tests/test_build_components/build_components_base.esp8266-ard.yaml @@ -1,9 +1,13 @@ esphome: name: componenttestesp8266ard friendly_name: $component_name + platformio_options: + board_upload.flash_size: 16MB + board_upload.maximum_size: 16777216 + board_build.ldscript: eagle.flash.16m14m.ld esp8266: - board: d1_mini + board: d1_mini_pro logger: level: VERY_VERBOSE From 5b568073291101144aa51a673184f8eddc0cdb8a Mon Sep 17 
00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 15:11:43 -1000 Subject: [PATCH 213/336] wip --- esphome/components/esp8266/__init__.py | 10 ++++++---- .../{iram_fix.py.script => testing_mode.py.script} | 0 2 files changed, 6 insertions(+), 4 deletions(-) rename esphome/components/esp8266/{iram_fix.py.script => testing_mode.py.script} (100%) diff --git a/esphome/components/esp8266/__init__.py b/esphome/components/esp8266/__init__.py index 8eab9946f2..a74f9ee8ce 100644 --- a/esphome/components/esp8266/__init__.py +++ b/esphome/components/esp8266/__init__.py @@ -190,7 +190,9 @@ async def to_code(config): cg.add_define("ESPHOME_VARIANT", "ESP8266") cg.add_define(ThreadModel.SINGLE) - cg.add_platformio_option("extra_scripts", ["pre:iram_fix.py", "post:post_build.py"]) + cg.add_platformio_option( + "extra_scripts", ["pre:testing_mode.py", "post:post_build.py"] + ) conf = config[CONF_FRAMEWORK] cg.add_platformio_option("framework", "arduino") @@ -271,8 +273,8 @@ def copy_files(): post_build_file, CORE.relative_build_path("post_build.py"), ) - iram_fix_file = dir / "iram_fix.py.script" + testing_mode_file = dir / "testing_mode.py.script" copy_file_if_changed( - iram_fix_file, - CORE.relative_build_path("iram_fix.py"), + testing_mode_file, + CORE.relative_build_path("testing_mode.py"), ) diff --git a/esphome/components/esp8266/iram_fix.py.script b/esphome/components/esp8266/testing_mode.py.script similarity index 100% rename from esphome/components/esp8266/iram_fix.py.script rename to esphome/components/esp8266/testing_mode.py.script From ce6d0cd8460cdd0ca02f5b1838a85f5511fcbb97 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 15:17:49 -1000 Subject: [PATCH 214/336] tweak --- .../components/esp8266/testing_mode.py.script | 87 +++++++++---------- .../build_components_base.esp8266-ard.yaml | 4 - 2 files changed, 43 insertions(+), 48 deletions(-) diff --git a/esphome/components/esp8266/testing_mode.py.script b/esphome/components/esp8266/testing_mode.py.script index d6c4170a18..0b59c2e000 100644 --- a/esphome/components/esp8266/testing_mode.py.script +++ b/esphome/components/esp8266/testing_mode.py.script @@ -79,57 +79,56 @@ def patch_linker_script_file(filepath, description): return False -def patch_sdk_linker_script_immediately(env): - """Patch SDK linker scripts immediately when script loads. - - This must happen BEFORE PlatformIO's builder calculates sizes. 
- """ - # Get the SDK linker script path - ldscript = env.GetProjectOption("board_build.ldscript", "") - if not ldscript: - return - - # Get the framework directory - framework_dir = env.PioPlatform().get_package_dir("framework-arduinoespressif8266") - if not framework_dir: - return - - # Patch the main SDK linker script (flash layout) - sdk_ld = os.path.join(framework_dir, "tools", "sdk", "ld", ldscript) - if os.path.exists(sdk_ld): - patch_linker_script_file(sdk_ld, f"SDK {ldscript}") - - # Also patch the local.eagle.app.v6.common.ld in SDK (contains IRAM and DRAM) - local_common = os.path.join(framework_dir, "tools", "sdk", "ld", "local.eagle.app.v6.common.ld") - if os.path.exists(local_common): - patch_linker_script_file(local_common, "SDK local.eagle.app.v6.common.ld") - - -def patch_linker_script_after_preprocess(source, target, env): - """Patch linker scripts after PlatformIO preprocesses them.""" - # Check if we're in testing mode by looking for the define +def patch_local_linker_script(source, target, env): + """Patch the local.eagle.app.v6.common.ld in build directory for IRAM.""" + # Check if we're in testing mode build_flags = env.get("BUILD_FLAGS", []) testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) if not testing_mode: return - # Patch SDK linker scripts first (for size calculation) - patch_sdk_linker_script_immediately(env) - - # Patch build directory scripts + # Patch the local linker script if it exists build_dir = env.subst("$BUILD_DIR") ld_dir = os.path.join(build_dir, "ld") - - if not os.path.exists(ld_dir): - return - - # Patch the local linker script (contains IRAM and DRAM definitions) - local_ld = os.path.join(ld_dir, "local.eagle.app.v6.common.ld") - if os.path.exists(local_ld): - patch_linker_script_file(local_ld, "build local.eagle.app.v6.common.ld") + if os.path.exists(ld_dir): + local_ld = os.path.join(ld_dir, "local.eagle.app.v6.common.ld") + if os.path.exists(local_ld): + patch_linker_script_file(local_ld, "local.eagle.app.v6.common.ld") -# Hook into the build process right before linking -# This runs after PlatformIO has already preprocessed the linker scripts -env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_linker_script_after_preprocess) +# Check if we're in testing mode +build_flags = env.get("BUILD_FLAGS", []) +testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) + +if testing_mode: + # Create custom linker script immediately (before linker command is built) + build_dir = env.subst("$BUILD_DIR") + ldscript = env.GetProjectOption("board_build.ldscript", "") + + if ldscript: + framework_dir = env.PioPlatform().get_package_dir("framework-arduinoespressif8266") + if framework_dir: + sdk_ld = os.path.join(framework_dir, "tools", "sdk", "ld", ldscript) + custom_ld = os.path.join(build_dir, f"testing_{ldscript}") + + if os.path.exists(sdk_ld) and not os.path.exists(custom_ld): + # Read and patch the SDK linker script + with open(sdk_ld, "r") as f: + content = f.read() + + patched_content = apply_memory_patches(content) + + # Write custom linker script + with open(custom_ld, "w") as f: + f.write(patched_content) + + print(f"ESPHome: Created custom linker script: {custom_ld}") + + # Tell the linker to use our custom script + if os.path.exists(custom_ld): + env.Replace(LDSCRIPT_PATH=custom_ld) + print(f"ESPHome: Using custom linker script with patched memory limits") + + # Hook to patch local.eagle.app.v6.common.ld after it's created + env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", 
patch_local_linker_script) diff --git a/tests/test_build_components/build_components_base.esp8266-ard.yaml b/tests/test_build_components/build_components_base.esp8266-ard.yaml index 8e2a5461f3..1e2d614392 100644 --- a/tests/test_build_components/build_components_base.esp8266-ard.yaml +++ b/tests/test_build_components/build_components_base.esp8266-ard.yaml @@ -1,10 +1,6 @@ esphome: name: componenttestesp8266ard friendly_name: $component_name - platformio_options: - board_upload.flash_size: 16MB - board_upload.maximum_size: 16777216 - board_build.ldscript: eagle.flash.16m14m.ld esp8266: board: d1_mini_pro From 5bd7342ff434fc30498ab31b438073f7eb4f9218 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 15:19:06 -1000 Subject: [PATCH 215/336] wip --- .../components/esp8266/testing_mode.py.script | 37 ++++++++++--------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/esphome/components/esp8266/testing_mode.py.script b/esphome/components/esp8266/testing_mode.py.script index 0b59c2e000..b1e476ca29 100644 --- a/esphome/components/esp8266/testing_mode.py.script +++ b/esphome/components/esp8266/testing_mode.py.script @@ -105,30 +105,31 @@ if testing_mode: # Create custom linker script immediately (before linker command is built) build_dir = env.subst("$BUILD_DIR") ldscript = env.GetProjectOption("board_build.ldscript", "") + assert ldscript, "No linker script configured in board_build.ldscript" - if ldscript: - framework_dir = env.PioPlatform().get_package_dir("framework-arduinoespressif8266") - if framework_dir: - sdk_ld = os.path.join(framework_dir, "tools", "sdk", "ld", ldscript) - custom_ld = os.path.join(build_dir, f"testing_{ldscript}") + framework_dir = env.PioPlatform().get_package_dir("framework-arduinoespressif8266") + assert framework_dir is not None, "Could not find framework-arduinoespressif8266 package" - if os.path.exists(sdk_ld) and not os.path.exists(custom_ld): - # Read and patch the SDK linker script - with open(sdk_ld, "r") as f: - content = f.read() + sdk_ld = os.path.join(framework_dir, "tools", "sdk", "ld", ldscript) + custom_ld = os.path.join(build_dir, f"testing_{ldscript}") - patched_content = apply_memory_patches(content) + if os.path.exists(sdk_ld) and not os.path.exists(custom_ld): + # Read and patch the SDK linker script + with open(sdk_ld, "r") as f: + content = f.read() - # Write custom linker script - with open(custom_ld, "w") as f: - f.write(patched_content) + patched_content = apply_memory_patches(content) - print(f"ESPHome: Created custom linker script: {custom_ld}") + # Write custom linker script + with open(custom_ld, "w") as f: + f.write(patched_content) - # Tell the linker to use our custom script - if os.path.exists(custom_ld): - env.Replace(LDSCRIPT_PATH=custom_ld) - print(f"ESPHome: Using custom linker script with patched memory limits") + print(f"ESPHome: Created custom linker script: {custom_ld}") + + # Tell the linker to use our custom script + assert os.path.exists(custom_ld), f"Custom linker script not found: {custom_ld}" + env.Replace(LDSCRIPT_PATH=custom_ld) + print(f"ESPHome: Using custom linker script with patched memory limits") # Hook to patch local.eagle.app.v6.common.ld after it's created env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_local_linker_script) From 6a042188c1a5d7d4709c2cdb81249b5337ebe6a4 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 15:19:40 -1000 Subject: [PATCH 216/336] wip --- esphome/components/esp8266/testing_mode.py.script | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/esphome/components/esp8266/testing_mode.py.script b/esphome/components/esp8266/testing_mode.py.script index b1e476ca29..964304a69d 100644 --- a/esphome/components/esp8266/testing_mode.py.script +++ b/esphome/components/esp8266/testing_mode.py.script @@ -102,7 +102,8 @@ build_flags = env.get("BUILD_FLAGS", []) testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) if testing_mode: - # Create custom linker script immediately (before linker command is built) + # Create a custom linker script in the build directory with patched memory limits + # This allows larger IRAM/DRAM/Flash for CI component grouping tests build_dir = env.subst("$BUILD_DIR") ldscript = env.GetProjectOption("board_build.ldscript", "") assert ldscript, "No linker script configured in board_build.ldscript" @@ -110,26 +111,29 @@ if testing_mode: framework_dir = env.PioPlatform().get_package_dir("framework-arduinoespressif8266") assert framework_dir is not None, "Could not find framework-arduinoespressif8266 package" + # Read the original SDK linker script (read-only, SDK is never modified) sdk_ld = os.path.join(framework_dir, "tools", "sdk", "ld", ldscript) + # Create a custom version in the build directory (isolated, temporary) custom_ld = os.path.join(build_dir, f"testing_{ldscript}") if os.path.exists(sdk_ld) and not os.path.exists(custom_ld): - # Read and patch the SDK linker script + # Read the SDK linker script with open(sdk_ld, "r") as f: content = f.read() + # Apply memory patches (IRAM: 2MB, DRAM: 2MB, Flash: 32MB) patched_content = apply_memory_patches(content) - # Write custom linker script + # Write the patched linker script to the build directory with open(custom_ld, "w") as f: f.write(patched_content) print(f"ESPHome: Created custom linker script: {custom_ld}") - # Tell the linker to use our custom script + # Tell the linker to use our custom script from the build directory assert os.path.exists(custom_ld), f"Custom linker script not found: {custom_ld}" env.Replace(LDSCRIPT_PATH=custom_ld) print(f"ESPHome: Using custom linker script with patched memory limits") - # Hook to patch local.eagle.app.v6.common.ld after it's created + # Also patch local.eagle.app.v6.common.ld after PlatformIO creates it env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_local_linker_script) From 09951d190c86bd142c87aa4eddd2b4f1b1d11e1a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 15:21:11 -1000 Subject: [PATCH 217/336] wip --- .../components/esp8266/testing_mode.py.script | 28 +++++++++---------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/esphome/components/esp8266/testing_mode.py.script b/esphome/components/esp8266/testing_mode.py.script index 964304a69d..1869a39df6 100644 --- a/esphome/components/esp8266/testing_mode.py.script +++ b/esphome/components/esp8266/testing_mode.py.script @@ -16,43 +16,41 @@ def apply_memory_patches(content): """ patches_applied = [] - # Replace IRAM size from 0x8000 (32KB) to 0x200000 (2MB) - # The line looks like: iram1_0_seg : org = 0x40100000, len = 0x8000 + # Patch IRAM segment to 2MB (for larger code in IRAM) + # Matches: iram1_0_seg : org = 0x..., len = 0x... 
new_content = re.sub( - r"(iram1_0_seg\s*:\s*org\s*=\s*0x40100000\s*,\s*len\s*=\s*)0x8000", + r"(iram1_0_seg\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+", r"\g<1>0x200000", content, ) if new_content != content: - patches_applied.append("IRAM: 32KB -> 2MB") + patches_applied.append("IRAM") content = new_content - # Replace DRAM (BSS) size to allow larger uninitialized data sections - # The line looks like: dram0_0_seg : org = 0x3FFE8000, len = 0x14000 - # Increase from 0x14000 (80KB) to 0x200000 (2MB) + # Patch DRAM segment to 2MB (for larger BSS/data sections) + # Matches: dram0_0_seg : org = 0x..., len = 0x... new_content = re.sub( - r"(dram0_0_seg\s*:\s*org\s*=\s*0x3FFE8000\s*,\s*len\s*=\s*)0x14000", + r"(dram0_0_seg\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+", r"\g<1>0x200000", content, ) if new_content != content: - patches_applied.append("DRAM: 80KB -> 2MB") + patches_applied.append("DRAM") content = new_content - # Replace Flash/irom0 size to allow larger code sections - # The line looks like: irom0_0_seg : org = 0x40201010, len = 0xfeff0 - # Increase from 0xfeff0 (~1MB) to 0x2000000 (32MB) - fake huge flash for testing + # Patch Flash segment to 32MB (for larger code sections) + # Matches: irom0_0_seg : org = 0x..., len = 0x... new_content = re.sub( - r"(irom0_0_seg\s*:\s*org\s*=\s*0x40201010\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+", + r"(irom0_0_seg\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+", r"\g<1>0x2000000", content, ) if new_content != content: - patches_applied.append("Flash: 1MB -> 32MB") + patches_applied.append("Flash") content = new_content if patches_applied: - print(f" Patches applied: {', '.join(patches_applied)}") + print(f" Patched memory segments: {', '.join(patches_applied)} (IRAM/DRAM: 2MB, Flash: 32MB)") return content From 4e629dfd899d6ed5b49666563c114ec0acbfe007 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 15:21:40 -1000 Subject: [PATCH 218/336] wip --- .../components/esp8266/testing_mode.py.script | 54 +++++++++---------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/esphome/components/esp8266/testing_mode.py.script b/esphome/components/esp8266/testing_mode.py.script index 1869a39df6..b1ff87b85d 100644 --- a/esphome/components/esp8266/testing_mode.py.script +++ b/esphome/components/esp8266/testing_mode.py.script @@ -5,6 +5,24 @@ import re Import("env") # noqa +def patch_segment_size(content, segment_name, new_size, label): + """Patch a memory segment's length in linker script. + + Args: + content: Linker script content + segment_name: Name of the segment (e.g., 'iram1_0_seg') + new_size: New size as hex string (e.g., '0x200000') + label: Human-readable label for logging (e.g., 'IRAM') + + Returns: + Tuple of (patched_content, was_patched) + """ + # Match: segment_name : org = 0x..., len = 0x... + pattern = rf"({segment_name}\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+" + new_content = re.sub(pattern, rf"\g<1>{new_size}", content) + return new_content, new_content != content + + def apply_memory_patches(content): """Apply IRAM, DRAM, and Flash patches to linker script content. @@ -16,38 +34,20 @@ def apply_memory_patches(content): """ patches_applied = [] - # Patch IRAM segment to 2MB (for larger code in IRAM) - # Matches: iram1_0_seg : org = 0x..., len = 0x... 
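A quick standalone check of what the generalized pattern does, using a MEMORY line in the shape quoted in the comments above; the label argument from the script's patch_segment_size is dropped here to keep the sketch minimal:

import re


def patch_segment_size(content: str, segment_name: str, new_size: str) -> tuple[str, bool]:
    # Same pattern as the script: match "name : org = 0x..., len = 0x..." and rewrite len.
    pattern = rf"({segment_name}\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+"
    patched = re.sub(pattern, rf"\g<1>{new_size}", content)
    return patched, patched != content


sample = "  iram1_0_seg :  org = 0x40100000, len = 0x8000"
patched, changed = patch_segment_size(sample, "iram1_0_seg", "0x200000")
assert changed and patched.endswith("len = 0x200000")
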
- new_content = re.sub( - r"(iram1_0_seg\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+", - r"\g<1>0x200000", - content, - ) - if new_content != content: + # Patch IRAM to 2MB (for larger code in IRAM) + content, patched = patch_segment_size(content, "iram1_0_seg", "0x200000", "IRAM") + if patched: patches_applied.append("IRAM") - content = new_content - # Patch DRAM segment to 2MB (for larger BSS/data sections) - # Matches: dram0_0_seg : org = 0x..., len = 0x... - new_content = re.sub( - r"(dram0_0_seg\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+", - r"\g<1>0x200000", - content, - ) - if new_content != content: + # Patch DRAM to 2MB (for larger BSS/data sections) + content, patched = patch_segment_size(content, "dram0_0_seg", "0x200000", "DRAM") + if patched: patches_applied.append("DRAM") - content = new_content - # Patch Flash segment to 32MB (for larger code sections) - # Matches: irom0_0_seg : org = 0x..., len = 0x... - new_content = re.sub( - r"(irom0_0_seg\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+", - r"\g<1>0x2000000", - content, - ) - if new_content != content: + # Patch Flash to 32MB (for larger code sections) + content, patched = patch_segment_size(content, "irom0_0_seg", "0x2000000", "Flash") + if patched: patches_applied.append("Flash") - content = new_content if patches_applied: print(f" Patched memory segments: {', '.join(patches_applied)} (IRAM/DRAM: 2MB, Flash: 32MB)") From c2147a57f19983d791cc4ab502572202d66f2cfd Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 15:30:04 -1000 Subject: [PATCH 219/336] bot review --- .../components/esp8266/testing_mode.py.script | 47 +++++++++++++++---- 1 file changed, 38 insertions(+), 9 deletions(-) diff --git a/esphome/components/esp8266/testing_mode.py.script b/esphome/components/esp8266/testing_mode.py.script index b1ff87b85d..44d84b765c 100644 --- a/esphome/components/esp8266/testing_mode.py.script +++ b/esphome/components/esp8266/testing_mode.py.script @@ -5,6 +5,12 @@ import re Import("env") # noqa +# Memory sizes for testing mode (allow larger builds for CI component grouping) +TESTING_IRAM_SIZE = "0x200000" # 2MB +TESTING_DRAM_SIZE = "0x200000" # 2MB +TESTING_FLASH_SIZE = "0x2000000" # 32MB + + def patch_segment_size(content, segment_name, new_size, label): """Patch a memory segment's length in linker script. 
@@ -34,29 +40,43 @@ def apply_memory_patches(content): """ patches_applied = [] - # Patch IRAM to 2MB (for larger code in IRAM) - content, patched = patch_segment_size(content, "iram1_0_seg", "0x200000", "IRAM") + # Patch IRAM (for larger code in IRAM) + content, patched = patch_segment_size(content, "iram1_0_seg", TESTING_IRAM_SIZE, "IRAM") if patched: patches_applied.append("IRAM") - # Patch DRAM to 2MB (for larger BSS/data sections) - content, patched = patch_segment_size(content, "dram0_0_seg", "0x200000", "DRAM") + # Patch DRAM (for larger BSS/data sections) + content, patched = patch_segment_size(content, "dram0_0_seg", TESTING_DRAM_SIZE, "DRAM") if patched: patches_applied.append("DRAM") - # Patch Flash to 32MB (for larger code sections) - content, patched = patch_segment_size(content, "irom0_0_seg", "0x2000000", "Flash") + # Patch Flash (for larger code sections) + content, patched = patch_segment_size(content, "irom0_0_seg", TESTING_FLASH_SIZE, "Flash") if patched: patches_applied.append("Flash") if patches_applied: - print(f" Patched memory segments: {', '.join(patches_applied)} (IRAM/DRAM: 2MB, Flash: 32MB)") + iram_mb = int(TESTING_IRAM_SIZE, 16) // (1024 * 1024) + dram_mb = int(TESTING_DRAM_SIZE, 16) // (1024 * 1024) + flash_mb = int(TESTING_FLASH_SIZE, 16) // (1024 * 1024) + print(f" Patched memory segments: {', '.join(patches_applied)} (IRAM/DRAM: {iram_mb}MB, Flash: {flash_mb}MB)") return content def patch_linker_script_file(filepath, description): - """Patch a single linker script file in place.""" + """Patch a linker script file in the build directory with enlarged memory segments. + + This function modifies linker scripts in the build directory only (never SDK files). + It patches IRAM, DRAM, and Flash segments to allow larger builds in testing mode. + + Args: + filepath: Path to the linker script file in the build directory + description: Human-readable description for logging + + Returns: + True if the file was patched, False if already patched or not found + """ if not os.path.exists(filepath): print(f"ESPHome: {description} not found at {filepath}") return False @@ -78,7 +98,16 @@ def patch_linker_script_file(filepath, description): def patch_local_linker_script(source, target, env): - """Patch the local.eagle.app.v6.common.ld in build directory for IRAM.""" + """Patch the local.eagle.app.v6.common.ld in build directory. + + This patches the preprocessed linker script that PlatformIO creates in the build + directory, enlarging IRAM, DRAM, and Flash segments for testing mode. + + Args: + source: SCons source nodes + target: SCons target nodes + env: SCons environment + """ # Check if we're in testing mode build_flags = env.get("BUILD_FLAGS", []) testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) From 9f668b0c4b6d81cc2213d21534d39abc0973e22f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 16:26:41 -1000 Subject: [PATCH 220/336] Add basic text_sensor tests (#11424) --- tests/components/text_sensor/common.yaml | 66 +++++++++++++++++++ .../text_sensor/test.esp8266-ard.yaml | 1 + 2 files changed, 67 insertions(+) create mode 100644 tests/components/text_sensor/common.yaml create mode 100644 tests/components/text_sensor/test.esp8266-ard.yaml diff --git a/tests/components/text_sensor/common.yaml b/tests/components/text_sensor/common.yaml new file mode 100644 index 0000000000..4459c0fa44 --- /dev/null +++ b/tests/components/text_sensor/common.yaml @@ -0,0 +1,66 @@ +text_sensor: + - platform: template + name: "Test Substitute Single" + id: test_substitute_single + filters: + - substitute: + - ERROR -> Error + + - platform: template + name: "Test Substitute Multiple" + id: test_substitute_multiple + filters: + - substitute: + - ERROR -> Error + - WARN -> Warning + - INFO -> Information + - DEBUG -> Debug + + - platform: template + name: "Test Substitute Chained" + id: test_substitute_chained + filters: + - substitute: + - foo -> bar + - to_upper + - substitute: + - BAR -> baz + + - platform: template + name: "Test Map Single" + id: test_map_single + filters: + - map: + - ON -> Active + + - platform: template + name: "Test Map Multiple" + id: test_map_multiple + filters: + - map: + - ON -> Active + - OFF -> Inactive + - UNKNOWN -> Error + - IDLE -> Standby + + - platform: template + name: "Test Map Passthrough" + id: test_map_passthrough + filters: + - map: + - Good -> Excellent + - Bad -> Poor + + - platform: template + name: "Test All Filters" + id: test_all_filters + filters: + - to_upper + - to_lower + - append: " suffix" + - prepend: "prefix " + - substitute: + - prefix -> PREFIX + - suffix -> SUFFIX + - map: + - PREFIX text SUFFIX -> mapped diff --git a/tests/components/text_sensor/test.esp8266-ard.yaml b/tests/components/text_sensor/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/text_sensor/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml From 05216db5f0cb397ba3bce637c8ef0d31c1ae0b77 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 16:26:49 -1000 Subject: [PATCH 221/336] ESP8266: Complete testing mode memory patches with DRAM and Flash (#11427) --- esphome/components/esp8266/__init__.py | 16 +- esphome/components/esp8266/iram_fix.py.script | 44 ----- .../components/esp8266/testing_mode.py.script | 166 ++++++++++++++++++ .../build_components_base.esp8266-ard.yaml | 2 +- 4 files changed, 176 insertions(+), 52 deletions(-) delete mode 100644 esphome/components/esp8266/iram_fix.py.script create mode 100644 esphome/components/esp8266/testing_mode.py.script diff --git a/esphome/components/esp8266/__init__.py b/esphome/components/esp8266/__init__.py index 9d8e6b7d1e..a74f9ee8ce 100644 --- a/esphome/components/esp8266/__init__.py +++ b/esphome/components/esp8266/__init__.py @@ -190,7 +190,9 @@ async def to_code(config): cg.add_define("ESPHOME_VARIANT", "ESP8266") cg.add_define(ThreadModel.SINGLE) - cg.add_platformio_option("extra_scripts", ["pre:iram_fix.py", "post:post_build.py"]) + cg.add_platformio_option( + "extra_scripts", ["pre:testing_mode.py", "post:post_build.py"] + ) conf = config[CONF_FRAMEWORK] cg.add_platformio_option("framework", "arduino") @@ -230,9 +232,9 @@ async def to_code(config): # For cases where nullptrs can be handled, use nothrow: `new (std::nothrow) T;` cg.add_build_flag("-DNEW_OOM_ABORT") - # In testing mode, fake a larger IRAM to allow linking grouped component tests - # Real ESP8266 hardware only has 32KB IRAM, but for CI testing we pretend it has 2MB - # This is done via a pre-build script that generates a custom linker script + # In testing mode, fake larger memory to allow linking grouped component tests + # Real ESP8266 hardware only has 32KB IRAM and ~80KB RAM, but for CI testing + # we pretend it has much larger memory to test that components compile together if CORE.testing_mode: cg.add_build_flag("-DESPHOME_TESTING_MODE") @@ -271,8 +273,8 @@ def copy_files(): post_build_file, CORE.relative_build_path("post_build.py"), ) - iram_fix_file = dir / "iram_fix.py.script" + testing_mode_file = dir / "testing_mode.py.script" copy_file_if_changed( - iram_fix_file, - CORE.relative_build_path("iram_fix.py"), + testing_mode_file, + CORE.relative_build_path("testing_mode.py"), ) diff --git a/esphome/components/esp8266/iram_fix.py.script b/esphome/components/esp8266/iram_fix.py.script deleted file mode 100644 index 96bddc2ced..0000000000 --- a/esphome/components/esp8266/iram_fix.py.script +++ /dev/null @@ -1,44 +0,0 @@ -import os -import re - -# pylint: disable=E0602 -Import("env") # noqa - - -def patch_linker_script_after_preprocess(source, target, env): - """Patch the local linker script after PlatformIO preprocesses it.""" - # Check if we're in testing mode by looking for the define - build_flags = env.get("BUILD_FLAGS", []) - testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) - - if not testing_mode: - return - - # Get the local linker script path - build_dir = env.subst("$BUILD_DIR") - local_ld = os.path.join(build_dir, "ld", "local.eagle.app.v6.common.ld") - - if not os.path.exists(local_ld): - return - - # Read the linker script - with open(local_ld, "r") as f: - content = f.read() - - # Replace IRAM size from 0x8000 (32KB) to 0x200000 (2MB) - # The line looks like: iram1_0_seg : org = 0x40100000, len = 0x8000 - updated = re.sub( - r"(iram1_0_seg\s*:\s*org\s*=\s*0x40100000\s*,\s*len\s*=\s*)0x8000", - r"\g<1>0x200000", - content, - ) - - if updated != content: - with open(local_ld, "w") as f: - f.write(updated) - print("ESPHome: Patched 
IRAM size to 2MB for testing mode") - - -# Hook into the build process right before linking -# This runs after PlatformIO has already preprocessed the linker scripts -env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_linker_script_after_preprocess) diff --git a/esphome/components/esp8266/testing_mode.py.script b/esphome/components/esp8266/testing_mode.py.script new file mode 100644 index 0000000000..44d84b765c --- /dev/null +++ b/esphome/components/esp8266/testing_mode.py.script @@ -0,0 +1,166 @@ +import os +import re + +# pylint: disable=E0602 +Import("env") # noqa + + +# Memory sizes for testing mode (allow larger builds for CI component grouping) +TESTING_IRAM_SIZE = "0x200000" # 2MB +TESTING_DRAM_SIZE = "0x200000" # 2MB +TESTING_FLASH_SIZE = "0x2000000" # 32MB + + +def patch_segment_size(content, segment_name, new_size, label): + """Patch a memory segment's length in linker script. + + Args: + content: Linker script content + segment_name: Name of the segment (e.g., 'iram1_0_seg') + new_size: New size as hex string (e.g., '0x200000') + label: Human-readable label for logging (e.g., 'IRAM') + + Returns: + Tuple of (patched_content, was_patched) + """ + # Match: segment_name : org = 0x..., len = 0x... + pattern = rf"({segment_name}\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+" + new_content = re.sub(pattern, rf"\g<1>{new_size}", content) + return new_content, new_content != content + + +def apply_memory_patches(content): + """Apply IRAM, DRAM, and Flash patches to linker script content. + + Args: + content: Linker script content as string + + Returns: + Patched content as string + """ + patches_applied = [] + + # Patch IRAM (for larger code in IRAM) + content, patched = patch_segment_size(content, "iram1_0_seg", TESTING_IRAM_SIZE, "IRAM") + if patched: + patches_applied.append("IRAM") + + # Patch DRAM (for larger BSS/data sections) + content, patched = patch_segment_size(content, "dram0_0_seg", TESTING_DRAM_SIZE, "DRAM") + if patched: + patches_applied.append("DRAM") + + # Patch Flash (for larger code sections) + content, patched = patch_segment_size(content, "irom0_0_seg", TESTING_FLASH_SIZE, "Flash") + if patched: + patches_applied.append("Flash") + + if patches_applied: + iram_mb = int(TESTING_IRAM_SIZE, 16) // (1024 * 1024) + dram_mb = int(TESTING_DRAM_SIZE, 16) // (1024 * 1024) + flash_mb = int(TESTING_FLASH_SIZE, 16) // (1024 * 1024) + print(f" Patched memory segments: {', '.join(patches_applied)} (IRAM/DRAM: {iram_mb}MB, Flash: {flash_mb}MB)") + + return content + + +def patch_linker_script_file(filepath, description): + """Patch a linker script file in the build directory with enlarged memory segments. + + This function modifies linker scripts in the build directory only (never SDK files). + It patches IRAM, DRAM, and Flash segments to allow larger builds in testing mode. 
+ + Args: + filepath: Path to the linker script file in the build directory + description: Human-readable description for logging + + Returns: + True if the file was patched, False if already patched or not found + """ + if not os.path.exists(filepath): + print(f"ESPHome: {description} not found at {filepath}") + return False + + print(f"ESPHome: Patching {description}...") + with open(filepath, "r") as f: + content = f.read() + + patched_content = apply_memory_patches(content) + + if patched_content != content: + with open(filepath, "w") as f: + f.write(patched_content) + print(f"ESPHome: Successfully patched {description}") + return True + else: + print(f"ESPHome: {description} already patched or no changes needed") + return False + + +def patch_local_linker_script(source, target, env): + """Patch the local.eagle.app.v6.common.ld in build directory. + + This patches the preprocessed linker script that PlatformIO creates in the build + directory, enlarging IRAM, DRAM, and Flash segments for testing mode. + + Args: + source: SCons source nodes + target: SCons target nodes + env: SCons environment + """ + # Check if we're in testing mode + build_flags = env.get("BUILD_FLAGS", []) + testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) + + if not testing_mode: + return + + # Patch the local linker script if it exists + build_dir = env.subst("$BUILD_DIR") + ld_dir = os.path.join(build_dir, "ld") + if os.path.exists(ld_dir): + local_ld = os.path.join(ld_dir, "local.eagle.app.v6.common.ld") + if os.path.exists(local_ld): + patch_linker_script_file(local_ld, "local.eagle.app.v6.common.ld") + + +# Check if we're in testing mode +build_flags = env.get("BUILD_FLAGS", []) +testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) + +if testing_mode: + # Create a custom linker script in the build directory with patched memory limits + # This allows larger IRAM/DRAM/Flash for CI component grouping tests + build_dir = env.subst("$BUILD_DIR") + ldscript = env.GetProjectOption("board_build.ldscript", "") + assert ldscript, "No linker script configured in board_build.ldscript" + + framework_dir = env.PioPlatform().get_package_dir("framework-arduinoespressif8266") + assert framework_dir is not None, "Could not find framework-arduinoespressif8266 package" + + # Read the original SDK linker script (read-only, SDK is never modified) + sdk_ld = os.path.join(framework_dir, "tools", "sdk", "ld", ldscript) + # Create a custom version in the build directory (isolated, temporary) + custom_ld = os.path.join(build_dir, f"testing_{ldscript}") + + if os.path.exists(sdk_ld) and not os.path.exists(custom_ld): + # Read the SDK linker script + with open(sdk_ld, "r") as f: + content = f.read() + + # Apply memory patches (IRAM: 2MB, DRAM: 2MB, Flash: 32MB) + patched_content = apply_memory_patches(content) + + # Write the patched linker script to the build directory + with open(custom_ld, "w") as f: + f.write(patched_content) + + print(f"ESPHome: Created custom linker script: {custom_ld}") + + # Tell the linker to use our custom script from the build directory + assert os.path.exists(custom_ld), f"Custom linker script not found: {custom_ld}" + env.Replace(LDSCRIPT_PATH=custom_ld) + print(f"ESPHome: Using custom linker script with patched memory limits") + + # Also patch local.eagle.app.v6.common.ld after PlatformIO creates it + env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_local_linker_script) diff --git a/tests/test_build_components/build_components_base.esp8266-ard.yaml 
b/tests/test_build_components/build_components_base.esp8266-ard.yaml index e4d6607c86..1e2d614392 100644 --- a/tests/test_build_components/build_components_base.esp8266-ard.yaml +++ b/tests/test_build_components/build_components_base.esp8266-ard.yaml @@ -3,7 +3,7 @@ esphome: friendly_name: $component_name esp8266: - board: d1_mini + board: d1_mini_pro logger: level: VERY_VERBOSE From 3b6ff615e8e6c34252a9fd8c82e5f003a78516e5 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 16:39:15 -1000 Subject: [PATCH 222/336] [ci] Fix clang-tidy split decision to account for component dependencies (#11430) --- script/determine-jobs.py | 63 +++++++++--- tests/script/test_determine_jobs.py | 143 +++++++++++++++++++++++++--- 2 files changed, 180 insertions(+), 26 deletions(-) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index 1877894fc4..0d77177e28 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -57,6 +57,7 @@ from helpers import ( get_component_from_path, get_component_test_files, get_components_from_integration_fixtures, + git_ls_files, parse_test_filename, root_path, ) @@ -162,6 +163,26 @@ def should_run_integration_tests(branch: str | None = None) -> bool: return False +@cache +def _is_clang_tidy_full_scan() -> bool: + """Check if clang-tidy configuration changed (requires full scan). + + Returns: + True if full scan is needed (hash changed), False otherwise. + """ + try: + result = subprocess.run( + [os.path.join(root_path, "script", "clang_tidy_hash.py"), "--check"], + capture_output=True, + check=False, + ) + # Exit 0 means hash changed (full scan needed) + return result.returncode == 0 + except Exception: + # If hash check fails, run full scan to be safe + return True + + def should_run_clang_tidy(branch: str | None = None) -> bool: """Determine if clang-tidy should run based on changed files. @@ -198,17 +219,7 @@ def should_run_clang_tidy(branch: str | None = None) -> bool: True if clang-tidy should run, False otherwise. 
""" # First check if clang-tidy configuration changed (full scan needed) - try: - result = subprocess.run( - [os.path.join(root_path, "script", "clang_tidy_hash.py"), "--check"], - capture_output=True, - check=False, - ) - # Exit 0 means hash changed (full scan needed) - if result.returncode == 0: - return True - except Exception: - # If hash check fails, run clang-tidy to be safe + if _is_clang_tidy_full_scan(): return True # Check if .clang-tidy.hash file itself was changed @@ -586,13 +597,37 @@ def main() -> None: # Detect components for memory impact analysis (merged config) memory_impact = detect_memory_impact_config(args.branch) + # Determine clang-tidy mode based on actual files that will be checked if run_clang_tidy: - if changed_cpp_file_count < CLANG_TIDY_SPLIT_THRESHOLD: - clang_tidy_mode = "nosplit" - else: + is_full_scan = _is_clang_tidy_full_scan() + + if is_full_scan: + # Full scan checks all files - always use split mode for efficiency clang_tidy_mode = "split" + files_to_check_count = -1 # Sentinel value for "all files" + else: + # Targeted scan - calculate actual files that will be checked + # This accounts for component dependencies, not just directly changed files + if changed_components: + # Count C++ files in all changed components (including dependencies) + all_cpp_files = list(git_ls_files(["*.cpp"]).keys()) + component_set = set(changed_components) + files_to_check_count = sum( + 1 + for f in all_cpp_files + if get_component_from_path(f) in component_set + ) + else: + # If no components changed, use the simple count of changed C++ files + files_to_check_count = changed_cpp_file_count + + if files_to_check_count < CLANG_TIDY_SPLIT_THRESHOLD: + clang_tidy_mode = "nosplit" + else: + clang_tidy_mode = "split" else: clang_tidy_mode = "disabled" + files_to_check_count = 0 # Build output output: dict[str, Any] = { diff --git a/tests/script/test_determine_jobs.py b/tests/script/test_determine_jobs.py index 02aaad2e3a..44aea73990 100644 --- a/tests/script/test_determine_jobs.py +++ b/tests/script/test_determine_jobs.py @@ -71,6 +71,12 @@ def mock_changed_files() -> Generator[Mock, None, None]: yield mock +@pytest.fixture(autouse=True) +def clear_clang_tidy_cache() -> None: + """Clear the clang-tidy full scan cache before each test.""" + determine_jobs._is_clang_tidy_full_scan.cache_clear() + + def test_main_all_tests_should_run( mock_should_run_integration_tests: Mock, mock_should_run_clang_tidy: Mock, @@ -98,7 +104,10 @@ def test_main_all_tests_should_run( mock_subprocess_run.return_value = mock_result # Run main function with mocked argv - with patch("sys.argv", ["determine-jobs.py"]): + with ( + patch("sys.argv", ["determine-jobs.py"]), + patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False), + ): determine_jobs.main() # Check output @@ -224,7 +233,10 @@ def test_main_with_branch_argument( ) mock_subprocess_run.return_value = mock_result - with patch("sys.argv", ["script.py", "-b", "main"]): + with ( + patch("sys.argv", ["script.py", "-b", "main"]), + patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False), + ): determine_jobs.main() # Check that functions were called with branch @@ -363,16 +375,6 @@ def test_should_run_clang_tidy_hash_check_exception() -> None: result = determine_jobs.should_run_clang_tidy() assert result is True # Fail safe - run clang-tidy - # Even with C++ files, exception should trigger clang-tidy - with ( - patch.object( - determine_jobs, "changed_files", return_value=["esphome/core.cpp"] - ), - 
patch("subprocess.run", side_effect=Exception("Hash check failed")), - ): - result = determine_jobs.should_run_clang_tidy() - assert result is True - def test_should_run_clang_tidy_with_branch() -> None: """Test should_run_clang_tidy with branch argument.""" @@ -763,3 +765,120 @@ def test_detect_memory_impact_config_skips_base_bus_components(tmp_path: Path) - assert result["should_run"] == "true" assert result["components"] == ["wifi"] assert "i2c" not in result["components"] + + +# Tests for clang-tidy split mode logic + + +def test_clang_tidy_mode_full_scan( + mock_should_run_integration_tests: Mock, + mock_should_run_clang_tidy: Mock, + mock_should_run_clang_format: Mock, + mock_should_run_python_linters: Mock, + mock_subprocess_run: Mock, + mock_changed_files: Mock, + capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test that full scan (hash changed) always uses split mode.""" + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + + mock_should_run_integration_tests.return_value = False + mock_should_run_clang_tidy.return_value = True + mock_should_run_clang_format.return_value = False + mock_should_run_python_linters.return_value = False + + # Mock list-components.py output + mock_result = Mock() + mock_result.stdout = json.dumps({"directly_changed": [], "all_changed": []}) + mock_subprocess_run.return_value = mock_result + + # Mock full scan (hash changed) + with ( + patch("sys.argv", ["determine-jobs.py"]), + patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=True), + ): + determine_jobs.main() + + captured = capsys.readouterr() + output = json.loads(captured.out) + + # Full scan should always use split mode + assert output["clang_tidy_mode"] == "split" + + +@pytest.mark.parametrize( + ("component_count", "files_per_component", "expected_mode"), + [ + # Small PR: 5 files in 1 component -> nosplit + (1, 5, "nosplit"), + # Medium PR: 30 files in 2 components -> nosplit + (2, 15, "nosplit"), + # Medium PR: 64 files total -> nosplit (just under threshold) + (2, 32, "nosplit"), + # Large PR: 65 files total -> split (at threshold) + (2, 33, "split"), # 2 * 33 = 66 files + # Large PR: 100 files in 10 components -> split + (10, 10, "split"), + ], + ids=[ + "1_comp_5_files_nosplit", + "2_comp_30_files_nosplit", + "2_comp_64_files_nosplit_under_threshold", + "2_comp_66_files_split_at_threshold", + "10_comp_100_files_split", + ], +) +def test_clang_tidy_mode_targeted_scan( + component_count: int, + files_per_component: int, + expected_mode: str, + mock_should_run_integration_tests: Mock, + mock_should_run_clang_tidy: Mock, + mock_should_run_clang_format: Mock, + mock_should_run_python_linters: Mock, + mock_subprocess_run: Mock, + mock_changed_files: Mock, + capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test clang-tidy mode selection based on files_to_check count.""" + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + + mock_should_run_integration_tests.return_value = False + mock_should_run_clang_tidy.return_value = True + mock_should_run_clang_format.return_value = False + mock_should_run_python_linters.return_value = False + + # Create component names + components = [f"comp{i}" for i in range(component_count)] + + # Mock list-components.py output + mock_result = Mock() + mock_result.stdout = json.dumps( + {"directly_changed": components, "all_changed": components} + ) + mock_subprocess_run.return_value = mock_result + + # Mock git_ls_files to return files for each component + cpp_files = 
{ + f"esphome/components/{comp}/file{i}.cpp": 0 + for comp in components + for i in range(files_per_component) + } + + # Create a mock that returns the cpp_files dict for any call + def mock_git_ls_files(patterns=None): + return cpp_files + + with ( + patch("sys.argv", ["determine-jobs.py"]), + patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False), + patch.object(determine_jobs, "git_ls_files", side_effect=mock_git_ls_files), + ): + determine_jobs.main() + + captured = capsys.readouterr() + output = json.loads(captured.out) + + assert output["clang_tidy_mode"] == expected_mode From a809a137294239e6587b6c7d2df63b7062d44cdc Mon Sep 17 00:00:00 2001 From: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com> Date: Mon, 20 Oct 2025 22:46:50 -0400 Subject: [PATCH 223/336] [core] Add support for extern "C" includes (#11422) --- esphome/const.py | 1 + esphome/core/config.py | 54 ++++++++++++++++++++-------- tests/unit_tests/core/test_config.py | 29 +++++++++++++++ 3 files changed, 70 insertions(+), 14 deletions(-) diff --git a/esphome/const.py b/esphome/const.py index ce1c033e41..3bbc6b8b3f 100644 --- a/esphome/const.py +++ b/esphome/const.py @@ -471,6 +471,7 @@ CONF_IMPORT_REACTIVE_ENERGY = "import_reactive_energy" CONF_INC_PIN = "inc_pin" CONF_INCLUDE_INTERNAL = "include_internal" CONF_INCLUDES = "includes" +CONF_INCLUDES_C = "includes_c" CONF_INDEX = "index" CONF_INDOOR = "indoor" CONF_INFRARED = "infrared" diff --git a/esphome/core/config.py b/esphome/core/config.py index 8a5876dbcf..2740453808 100644 --- a/esphome/core/config.py +++ b/esphome/core/config.py @@ -21,6 +21,7 @@ from esphome.const import ( CONF_FRIENDLY_NAME, CONF_ID, CONF_INCLUDES, + CONF_INCLUDES_C, CONF_LIBRARIES, CONF_MIN_VERSION, CONF_NAME, @@ -227,6 +228,7 @@ CONFIG_SCHEMA = cv.All( } ), cv.Optional(CONF_INCLUDES, default=[]): cv.ensure_list(valid_include), + cv.Optional(CONF_INCLUDES_C, default=[]): cv.ensure_list(valid_include), cv.Optional(CONF_LIBRARIES, default=[]): cv.ensure_list(cv.string_strict), cv.Optional(CONF_NAME_ADD_MAC_SUFFIX, default=False): cv.boolean, cv.Optional(CONF_DEBUG_SCHEDULER, default=False): cv.boolean, @@ -302,6 +304,17 @@ def _list_target_platforms(): return target_platforms +def _sort_includes_by_type(includes: list[str]) -> tuple[list[str], list[str]]: + system_includes = [] + other_includes = [] + for include in includes: + if include.startswith("<") and include.endswith(">"): + system_includes.append(include) + else: + other_includes.append(include) + return system_includes, other_includes + + def preload_core_config(config, result) -> str: with cv.prepend_path(CONF_ESPHOME): conf = PRELOAD_CONFIG_SCHEMA(config[CONF_ESPHOME]) @@ -339,7 +352,7 @@ def preload_core_config(config, result) -> str: return target_platforms[0] -def include_file(path: Path, basename: Path): +def include_file(path: Path, basename: Path, is_c_header: bool = False): parts = basename.parts dst = CORE.relative_src_path(*parts) copy_file_if_changed(path, dst) @@ -347,7 +360,14 @@ def include_file(path: Path, basename: Path): ext = path.suffix if ext in [".h", ".hpp", ".tcc"]: # Header, add include statement - cg.add_global(cg.RawStatement(f'#include "{basename}"')) + if is_c_header: + # Wrap in extern "C" block for C headers + cg.add_global( + cg.RawStatement(f'extern "C" {{\n #include "{basename}"\n}}') + ) + else: + # Regular include + cg.add_global(cg.RawStatement(f'#include "{basename}"')) ARDUINO_GLUE_CODE = """\ @@ -377,7 +397,7 @@ async def add_arduino_global_workaround(): 
@coroutine_with_priority(CoroPriority.FINAL) -async def add_includes(includes: list[str]) -> None: +async def add_includes(includes: list[str], is_c_header: bool = False) -> None: # Add includes at the very end, so that the included files can access global variables for include in includes: path = CORE.relative_config_path(include) @@ -385,11 +405,11 @@ async def add_includes(includes: list[str]) -> None: # Directory, copy tree for p in walk_files(path): basename = p.relative_to(path.parent) - include_file(p, basename) + include_file(p, basename, is_c_header) else: # Copy file basename = Path(path.name) - include_file(path, basename) + include_file(path, basename, is_c_header) @coroutine_with_priority(CoroPriority.FINAL) @@ -494,19 +514,25 @@ async def to_code(config: ConfigType) -> None: CORE.add_job(add_arduino_global_workaround) if config[CONF_INCLUDES]: - # Get the <...> includes - system_includes = [] - other_includes = [] - for include in config[CONF_INCLUDES]: - if include.startswith("<") and include.endswith(">"): - system_includes.append(include) - else: - other_includes.append(include) + system_includes, other_includes = _sort_includes_by_type(config[CONF_INCLUDES]) # <...> includes should be at the start for include in system_includes: cg.add_global(cg.RawStatement(f"#include {include}"), prepend=True) # Other includes should be at the end - CORE.add_job(add_includes, other_includes) + CORE.add_job(add_includes, other_includes, False) + + if config[CONF_INCLUDES_C]: + system_includes, other_includes = _sort_includes_by_type( + config[CONF_INCLUDES_C] + ) + # <...> includes should be at the start + for include in system_includes: + cg.add_global( + cg.RawStatement(f'extern "C" {{\n #include {include}\n}}'), + prepend=True, + ) + # Other includes should be at the end + CORE.add_job(add_includes, other_includes, True) if project_conf := config.get(CONF_PROJECT): cg.add_define("ESPHOME_PROJECT_NAME", project_conf[CONF_NAME]) diff --git a/tests/unit_tests/core/test_config.py b/tests/unit_tests/core/test_config.py index 4fddfc9678..a1e4627dc9 100644 --- a/tests/unit_tests/core/test_config.py +++ b/tests/unit_tests/core/test_config.py @@ -517,6 +517,35 @@ def test_include_file_cpp(tmp_path: Path, mock_copy_file_if_changed: Mock) -> No mock_cg.add_global.assert_not_called() +def test_include_file_with_c_header( + tmp_path: Path, mock_copy_file_if_changed: Mock +) -> None: + """Test include_file wraps header in extern C block when is_c_header is True.""" + src_file = tmp_path / "c_library.h" + src_file.write_text("// C library header") + + CORE.build_path = tmp_path / "build" + + with patch("esphome.core.config.cg") as mock_cg: + # Mock RawStatement to capture the text + mock_raw_statement = MagicMock() + mock_raw_statement.text = "" + + def raw_statement_side_effect(text): + mock_raw_statement.text = text + return mock_raw_statement + + mock_cg.RawStatement.side_effect = raw_statement_side_effect + + config.include_file(src_file, Path("c_library.h"), is_c_header=True) + + mock_copy_file_if_changed.assert_called_once() + mock_cg.add_global.assert_called_once() + # Check that include statement is wrapped in extern "C" block + assert 'extern "C"' in mock_raw_statement.text + assert '#include "c_library.h"' in mock_raw_statement.text + + def test_get_usable_cpu_count() -> None: """Test get_usable_cpu_count returns CPU count.""" count = config.get_usable_cpu_count() From 85959e3004218deb774c7e996a788c913d71830e Mon Sep 17 00:00:00 2001 From: "J. 
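As a quick sketch of what the new includes_c option produces, the wrapping applied by include_file() and to_code() in this patch can be reproduced standalone; the header names below are purely illustrative and the helper is not part of the codebase:

    def extern_c_global(include: str) -> str:
        # Mirrors the RawStatement text emitted for C headers: angle-bracket includes are
        # prepended at the top of main.cpp, local headers are copied and included at the end,
        # and both forms are wrapped in an extern "C" block.
        if include.startswith("<") and include.endswith(">"):
            return f'extern "C" {{\n #include {include}\n}}'
        return f'extern "C" {{\n #include "{include}"\n}}'

    print(extern_c_global("<rom/ets_sys.h>"))
    print(extern_c_global("my_c_driver.h"))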
Nick Koston" Date: Mon, 20 Oct 2025 16:47:13 -1000 Subject: [PATCH 224/336] [sensor,text_sensor,binary_sensor] Optimize filter parameters with std::initializer_list (#11426) --- esphome/components/binary_sensor/binary_sensor.cpp | 2 +- esphome/components/binary_sensor/binary_sensor.h | 4 ++-- esphome/components/sensor/sensor.cpp | 4 ++-- esphome/components/sensor/sensor.h | 6 +++--- esphome/components/text_sensor/text_sensor.cpp | 4 ++-- esphome/components/text_sensor/text_sensor.h | 6 +++--- 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/esphome/components/binary_sensor/binary_sensor.cpp b/esphome/components/binary_sensor/binary_sensor.cpp index 39319d3c1c..33b3de6d72 100644 --- a/esphome/components/binary_sensor/binary_sensor.cpp +++ b/esphome/components/binary_sensor/binary_sensor.cpp @@ -51,7 +51,7 @@ void BinarySensor::add_filter(Filter *filter) { last_filter->next_ = filter; } } -void BinarySensor::add_filters(const std::vector &filters) { +void BinarySensor::add_filters(std::initializer_list filters) { for (Filter *filter : filters) { this->add_filter(filter); } diff --git a/esphome/components/binary_sensor/binary_sensor.h b/esphome/components/binary_sensor/binary_sensor.h index 2bd17d97c9..c1661d710f 100644 --- a/esphome/components/binary_sensor/binary_sensor.h +++ b/esphome/components/binary_sensor/binary_sensor.h @@ -4,7 +4,7 @@ #include "esphome/core/helpers.h" #include "esphome/components/binary_sensor/filter.h" -#include +#include namespace esphome { @@ -48,7 +48,7 @@ class BinarySensor : public StatefulEntityBase, public EntityBase_DeviceCl void publish_initial_state(bool new_state); void add_filter(Filter *filter); - void add_filters(const std::vector &filters); + void add_filters(std::initializer_list filters); // ========== INTERNAL METHODS ========== // (In most use cases you won't need these) diff --git a/esphome/components/sensor/sensor.cpp b/esphome/components/sensor/sensor.cpp index 4292b8c0bc..92da4345b7 100644 --- a/esphome/components/sensor/sensor.cpp +++ b/esphome/components/sensor/sensor.cpp @@ -107,12 +107,12 @@ void Sensor::add_filter(Filter *filter) { } filter->initialize(this, nullptr); } -void Sensor::add_filters(const std::vector &filters) { +void Sensor::add_filters(std::initializer_list filters) { for (Filter *filter : filters) { this->add_filter(filter); } } -void Sensor::set_filters(const std::vector &filters) { +void Sensor::set_filters(std::initializer_list filters) { this->clear_filters(); this->add_filters(filters); } diff --git a/esphome/components/sensor/sensor.h b/esphome/components/sensor/sensor.h index f3fa601a5e..a4210e5e6c 100644 --- a/esphome/components/sensor/sensor.h +++ b/esphome/components/sensor/sensor.h @@ -6,7 +6,7 @@ #include "esphome/core/log.h" #include "esphome/components/sensor/filter.h" -#include +#include #include namespace esphome { @@ -77,10 +77,10 @@ class Sensor : public EntityBase, public EntityBase_DeviceClass, public EntityBa * SlidingWindowMovingAverageFilter(15, 15), // average over last 15 values * }); */ - void add_filters(const std::vector &filters); + void add_filters(std::initializer_list filters); /// Clear the filters and replace them by filters. - void set_filters(const std::vector &filters); + void set_filters(std::initializer_list filters); /// Clear the entire filter chain. 
void clear_filters(); diff --git a/esphome/components/text_sensor/text_sensor.cpp b/esphome/components/text_sensor/text_sensor.cpp index 17bf20466e..0294d65861 100644 --- a/esphome/components/text_sensor/text_sensor.cpp +++ b/esphome/components/text_sensor/text_sensor.cpp @@ -51,12 +51,12 @@ void TextSensor::add_filter(Filter *filter) { } filter->initialize(this, nullptr); } -void TextSensor::add_filters(const std::vector &filters) { +void TextSensor::add_filters(std::initializer_list filters) { for (Filter *filter : filters) { this->add_filter(filter); } } -void TextSensor::set_filters(const std::vector &filters) { +void TextSensor::set_filters(std::initializer_list filters) { this->clear_filters(); this->add_filters(filters); } diff --git a/esphome/components/text_sensor/text_sensor.h b/esphome/components/text_sensor/text_sensor.h index abbea27b59..db2e857ae3 100644 --- a/esphome/components/text_sensor/text_sensor.h +++ b/esphome/components/text_sensor/text_sensor.h @@ -5,7 +5,7 @@ #include "esphome/core/helpers.h" #include "esphome/components/text_sensor/filter.h" -#include +#include #include namespace esphome { @@ -37,10 +37,10 @@ class TextSensor : public EntityBase, public EntityBase_DeviceClass { void add_filter(Filter *filter); /// Add a list of vectors to the back of the filter chain. - void add_filters(const std::vector &filters); + void add_filters(std::initializer_list filters); /// Clear the filters and replace them by filters. - void set_filters(const std::vector &filters); + void set_filters(std::initializer_list filters); /// Clear the entire filter chain. void clear_filters(); From 040130e35712fcd6e7237c73be575605138cce4a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 17:02:07 -1000 Subject: [PATCH 225/336] [ci] Fix memory impact workflow for new components (#11421) --- script/test_build_components.py | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/script/test_build_components.py b/script/test_build_components.py index 77c97a8773..e369b0364e 100755 --- a/script/test_build_components.py +++ b/script/test_build_components.py @@ -966,11 +966,33 @@ def test_components( # Find all component tests all_tests = {} for pattern in component_patterns: + # Skip empty patterns (happens when components list is empty string) + if not pattern: + continue all_tests.update(find_component_tests(tests_dir, pattern, base_only)) + # If no components found, build a reference configuration for baseline comparison + # Create a synthetic "empty" component test that will build just the base config if not all_tests: print(f"No components found matching: {component_patterns}") - return 1 + print( + "Building reference configuration with no components for baseline comparison..." + ) + + # Create empty test files for each platform (or filtered platform) + reference_tests: list[Path] = [] + for platform_name, base_file in platform_bases.items(): + if platform_filter and not platform_name.startswith(platform_filter): + continue + # Create an empty test file named to match the platform + empty_test_file = build_dir / f"reference.{platform_name}.yaml" + empty_test_file.write_text( + "# Empty component test for baseline reference\n" + ) + reference_tests.append(empty_test_file) + + # Add to all_tests dict with component name "reference" + all_tests["reference"] = reference_tests print(f"Found {len(all_tests)} components to test") From 6fe533eddb6f81e4a8ff44affa04a7054b60b4b1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 16:59:11 -1000 Subject: [PATCH 226/336] [core] Optimize automation actions memory usage with std::initializer_list --- esphome/core/automation.h | 4 ++-- esphome/core/base_automation.h | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/esphome/core/automation.h b/esphome/core/automation.h index e156818312..0512752d50 100644 --- a/esphome/core/automation.h +++ b/esphome/core/automation.h @@ -243,7 +243,7 @@ template class ActionList { } this->actions_end_ = action; } - void add_actions(const std::vector *> &actions) { + void add_actions(const std::initializer_list *> &actions) { for (auto *action : actions) { this->add_action(action); } @@ -286,7 +286,7 @@ template class Automation { explicit Automation(Trigger *trigger) : trigger_(trigger) { this->trigger_->set_automation_parent(this); } void add_action(Action *action) { this->actions_.add_action(action); } - void add_actions(const std::vector *> &actions) { this->actions_.add_actions(actions); } + void add_actions(const std::initializer_list *> &actions) { this->actions_.add_actions(actions); } void stop() { this->actions_.stop(); } diff --git a/esphome/core/base_automation.h b/esphome/core/base_automation.h index f1248e0035..af8cde971b 100644 --- a/esphome/core/base_automation.h +++ b/esphome/core/base_automation.h @@ -194,12 +194,12 @@ template class IfAction : public Action { public: explicit IfAction(Condition *condition) : condition_(condition) {} - void add_then(const std::vector *> &actions) { + void add_then(const std::initializer_list *> &actions) { this->then_.add_actions(actions); this->then_.add_action(new LambdaAction([this](Ts... x) { this->play_next_(x...); })); } - void add_else(const std::vector *> &actions) { + void add_else(const std::initializer_list *> &actions) { this->else_.add_actions(actions); this->else_.add_action(new LambdaAction([this](Ts... x) { this->play_next_(x...); })); } @@ -240,7 +240,7 @@ template class WhileAction : public Action { public: WhileAction(Condition *condition) : condition_(condition) {} - void add_then(const std::vector *> &actions) { + void add_then(const std::initializer_list *> &actions) { this->then_.add_actions(actions); this->then_.add_action(new LambdaAction([this](Ts... x) { if (this->num_running_ > 0 && this->condition_->check_tuple(this->var_)) { @@ -287,7 +287,7 @@ template class RepeatAction : public Action { public: TEMPLATABLE_VALUE(uint32_t, count) - void add_then(const std::vector *> &actions) { + void add_then(const std::initializer_list *> &actions) { this->then_.add_actions(actions); this->then_.add_action(new LambdaAction([this](uint32_t iteration, Ts... x) { iteration++; From 77203f0cb4428412191714605c8b5f4e1ee31a4f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 17:24:51 -1000 Subject: [PATCH 227/336] [text_sensor] Optimize filters with FixedVector (1.6KB flash savings) (#11423) Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com> --- esphome/components/text_sensor/__init__.py | 25 ++++++++---- esphome/components/text_sensor/filter.cpp | 18 ++++++--- esphome/components/text_sensor/filter.h | 44 ++++++++++++++++------ 3 files changed, 64 insertions(+), 23 deletions(-) diff --git a/esphome/components/text_sensor/__init__.py b/esphome/components/text_sensor/__init__.py index f7b3b5c55e..7a9e947abd 100644 --- a/esphome/components/text_sensor/__init__.py +++ b/esphome/components/text_sensor/__init__.py @@ -110,17 +110,28 @@ def validate_mapping(value): "substitute", SubstituteFilter, cv.ensure_list(validate_mapping) ) async def substitute_filter_to_code(config, filter_id): - from_strings = [conf[CONF_FROM] for conf in config] - to_strings = [conf[CONF_TO] for conf in config] - return cg.new_Pvariable(filter_id, from_strings, to_strings) + substitutions = [ + cg.StructInitializer( + cg.MockObj("Substitution", "esphome::text_sensor::"), + ("from", conf[CONF_FROM]), + ("to", conf[CONF_TO]), + ) + for conf in config + ] + return cg.new_Pvariable(filter_id, substitutions) @FILTER_REGISTRY.register("map", MapFilter, cv.ensure_list(validate_mapping)) async def map_filter_to_code(config, filter_id): - map_ = cg.std_ns.class_("map").template(cg.std_string, cg.std_string) - return cg.new_Pvariable( - filter_id, map_([(item[CONF_FROM], item[CONF_TO]) for item in config]) - ) + mappings = [ + cg.StructInitializer( + cg.MockObj("Substitution", "esphome::text_sensor::"), + ("from", conf[CONF_FROM]), + ("to", conf[CONF_TO]), + ) + for conf in config + ] + return cg.new_Pvariable(filter_id, mappings) validate_device_class = cv.one_of(*DEVICE_CLASSES, lower=True, space="_") diff --git a/esphome/components/text_sensor/filter.cpp b/esphome/components/text_sensor/filter.cpp index 80edae2b6c..a242b43b1c 100644 --- a/esphome/components/text_sensor/filter.cpp +++ b/esphome/components/text_sensor/filter.cpp @@ -62,19 +62,27 @@ optional AppendFilter::new_value(std::string value) { return value optional PrependFilter::new_value(std::string value) { return this->prefix_ + value; } // Substitute +SubstituteFilter::SubstituteFilter(const std::initializer_list &substitutions) + : substitutions_(substitutions) {} + optional SubstituteFilter::new_value(std::string value) { std::size_t pos; - for (size_t i = 0; i < this->from_strings_.size(); i++) { - while ((pos = value.find(this->from_strings_[i])) != std::string::npos) - value.replace(pos, this->from_strings_[i].size(), this->to_strings_[i]); + for (const auto &sub : this->substitutions_) { + while ((pos = value.find(sub.from)) != std::string::npos) + value.replace(pos, sub.from.size(), sub.to); } return value; } // Map +MapFilter::MapFilter(const std::initializer_list &mappings) : mappings_(mappings) {} + optional MapFilter::new_value(std::string value) { - auto item = mappings_.find(value); - return item == mappings_.end() ? 
value : item->second; + for (const auto &mapping : this->mappings_) { + if (mapping.from == value) + return mapping.to; + } + return value; // Pass through if no match } } // namespace text_sensor diff --git a/esphome/components/text_sensor/filter.h b/esphome/components/text_sensor/filter.h index 2de9010b88..c77c221235 100644 --- a/esphome/components/text_sensor/filter.h +++ b/esphome/components/text_sensor/filter.h @@ -2,10 +2,6 @@ #include "esphome/core/component.h" #include "esphome/core/helpers.h" -#include -#include -#include -#include namespace esphome { namespace text_sensor { @@ -98,26 +94,52 @@ class PrependFilter : public Filter { std::string prefix_; }; +struct Substitution { + std::string from; + std::string to; +}; + /// A simple filter that replaces a substring with another substring class SubstituteFilter : public Filter { public: - SubstituteFilter(std::vector from_strings, std::vector to_strings) - : from_strings_(std::move(from_strings)), to_strings_(std::move(to_strings)) {} + explicit SubstituteFilter(const std::initializer_list &substitutions); optional new_value(std::string value) override; protected: - std::vector from_strings_; - std::vector to_strings_; + FixedVector substitutions_; }; -/// A filter that maps values from one set to another +/** A filter that maps values from one set to another + * + * Uses linear search instead of std::map for typical small datasets (2-20 mappings). + * Linear search on contiguous memory is faster than red-black tree lookups when: + * - Dataset is small (< ~30 items) + * - Memory is contiguous (cache-friendly, better CPU cache utilization) + * - No pointer chasing overhead (tree node traversal) + * - String comparison cost dominates lookup time + * + * Benchmark results (see benchmark_map_filter.cpp): + * - 2 mappings: Linear 1.26x faster than std::map + * - 5 mappings: Linear 2.25x faster than std::map + * - 10 mappings: Linear 1.83x faster than std::map + * - 20 mappings: Linear 1.59x faster than std::map + * - 30 mappings: Linear 1.09x faster than std::map + * - 40 mappings: std::map 1.27x faster than Linear (break-even) + * + * Benefits over std::map: + * - ~2KB smaller flash (no red-black tree code) + * - ~24-32 bytes less RAM per mapping (no tree node overhead) + * - Faster for typical ESPHome usage (2-10 mappings common, 20+ rare) + * + * Break-even point: ~35-40 mappings, but ESPHome configs rarely exceed 20 + */ class MapFilter : public Filter { public: - MapFilter(std::map mappings) : mappings_(std::move(mappings)) {} + explicit MapFilter(const std::initializer_list &mappings); optional new_value(std::string value) override; protected: - std::map mappings_; + FixedVector mappings_; }; } // namespace text_sensor From 0938609f7af4c146401e646aa20051c843f63c22 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Tue, 21 Oct 2025 16:58:26 +1300 Subject: [PATCH 228/336] [improv] Put next_url behind defines to save flash (#11420) Co-authored-by: J. 
Nick Koston --- esphome/components/esp32_improv/__init__.py | 2 +- .../components/esp32_improv/esp32_improv_component.cpp | 2 ++ esphome/components/improv_base/__init__.py | 10 +++++++--- esphome/components/improv_base/improv_base.cpp | 3 +++ esphome/components/improv_base/improv_base.h | 5 +++++ esphome/components/improv_serial/__init__.py | 2 +- .../improv_serial/improv_serial_component.cpp | 2 ++ esphome/core/defines.h | 2 ++ 8 files changed, 23 insertions(+), 5 deletions(-) diff --git a/esphome/components/esp32_improv/__init__.py b/esphome/components/esp32_improv/__init__.py index a55c819e6f..1a7194da81 100644 --- a/esphome/components/esp32_improv/__init__.py +++ b/esphome/components/esp32_improv/__init__.py @@ -112,7 +112,7 @@ async def to_code(config): cg.add_define("USE_IMPROV") - await improv_base.setup_improv_core(var, config) + await improv_base.setup_improv_core(var, config, "esp32_improv") cg.add(var.set_identify_duration(config[CONF_IDENTIFY_DURATION])) cg.add(var.set_authorized_duration(config[CONF_AUTHORIZED_DURATION])) diff --git a/esphome/components/esp32_improv/esp32_improv_component.cpp b/esphome/components/esp32_improv/esp32_improv_component.cpp index 329349b531..56436b9d3d 100644 --- a/esphome/components/esp32_improv/esp32_improv_component.cpp +++ b/esphome/components/esp32_improv/esp32_improv_component.cpp @@ -389,11 +389,13 @@ void ESP32ImprovComponent::check_wifi_connection_() { std::string url_strings[3]; size_t url_count = 0; +#ifdef USE_ESP32_IMPROV_NEXT_URL // Add next_url if configured (should be first per Improv BLE spec) std::string next_url = this->get_formatted_next_url_(); if (!next_url.empty()) { url_strings[url_count++] = std::move(next_url); } +#endif // Add default URLs for backward compatibility url_strings[url_count++] = ESPHOME_MY_LINK; diff --git a/esphome/components/improv_base/__init__.py b/esphome/components/improv_base/__init__.py index aa75f4d89c..e175aa2220 100644 --- a/esphome/components/improv_base/__init__.py +++ b/esphome/components/improv_base/__init__.py @@ -3,6 +3,8 @@ import re import esphome.codegen as cg import esphome.config_validation as cv from esphome.const import __version__ +from esphome.cpp_generator import MockObj +from esphome.types import ConfigType CODEOWNERS = ["@esphome/core"] @@ -35,7 +37,9 @@ def _process_next_url(url: str): return url -async def setup_improv_core(var, config): - if CONF_NEXT_URL in config: - cg.add(var.set_next_url(_process_next_url(config[CONF_NEXT_URL]))) +async def setup_improv_core(var: MockObj, config: ConfigType, component: str): + if next_url := config.get(CONF_NEXT_URL): + cg.add(var.set_next_url(_process_next_url(next_url))) + cg.add_define(f"USE_{component.upper()}_NEXT_URL") + cg.add_library("improv/Improv", "1.2.4") diff --git a/esphome/components/improv_base/improv_base.cpp b/esphome/components/improv_base/improv_base.cpp index 233098e6cd..2091390f95 100644 --- a/esphome/components/improv_base/improv_base.cpp +++ b/esphome/components/improv_base/improv_base.cpp @@ -2,10 +2,12 @@ #include "esphome/components/network/util.h" #include "esphome/core/application.h" +#include "esphome/core/defines.h" namespace esphome { namespace improv_base { +#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL) static constexpr const char DEVICE_NAME_PLACEHOLDER[] = "{{device_name}}"; static constexpr size_t DEVICE_NAME_PLACEHOLDER_LEN = sizeof(DEVICE_NAME_PLACEHOLDER) - 1; static constexpr const char IP_ADDRESS_PLACEHOLDER[] = "{{ip_address}}"; @@ -43,6 +45,7 @@ std::string 
ImprovBase::get_formatted_next_url_() { return formatted_url; } +#endif } // namespace improv_base } // namespace esphome diff --git a/esphome/components/improv_base/improv_base.h b/esphome/components/improv_base/improv_base.h index 90cd02a4ab..e4138479df 100644 --- a/esphome/components/improv_base/improv_base.h +++ b/esphome/components/improv_base/improv_base.h @@ -1,17 +1,22 @@ #pragma once #include +#include "esphome/core/defines.h" namespace esphome { namespace improv_base { class ImprovBase { public: +#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL) void set_next_url(const std::string &next_url) { this->next_url_ = next_url; } +#endif protected: +#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL) std::string get_formatted_next_url_(); std::string next_url_; +#endif }; } // namespace improv_base diff --git a/esphome/components/improv_serial/__init__.py b/esphome/components/improv_serial/__init__.py index 568b200a85..fb2b541707 100644 --- a/esphome/components/improv_serial/__init__.py +++ b/esphome/components/improv_serial/__init__.py @@ -43,4 +43,4 @@ FINAL_VALIDATE_SCHEMA = validate_logger async def to_code(config): var = cg.new_Pvariable(config[CONF_ID]) await cg.register_component(var, config) - await improv_base.setup_improv_core(var, config) + await improv_base.setup_improv_core(var, config, "improv_serial") diff --git a/esphome/components/improv_serial/improv_serial_component.cpp b/esphome/components/improv_serial/improv_serial_component.cpp index 28245dcfdf..ce82504d3c 100644 --- a/esphome/components/improv_serial/improv_serial_component.cpp +++ b/esphome/components/improv_serial/improv_serial_component.cpp @@ -146,9 +146,11 @@ void ImprovSerialComponent::loop() { std::vector ImprovSerialComponent::build_rpc_settings_response_(improv::Command command) { std::vector urls; +#ifdef USE_IMPROV_SERIAL_NEXT_URL if (!this->next_url_.empty()) { urls.push_back(this->get_formatted_next_url_()); } +#endif #ifdef USE_WEBSERVER for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) { if (ip.is_ip4()) { diff --git a/esphome/core/defines.h b/esphome/core/defines.h index ff9afb9114..4e9fb078a0 100644 --- a/esphome/core/defines.h +++ b/esphome/core/defines.h @@ -44,6 +44,7 @@ #define USE_GRAPHICAL_DISPLAY_MENU #define USE_HOMEASSISTANT_TIME #define USE_HTTP_REQUEST_OTA_WATCHDOG_TIMEOUT 8000 // NOLINT +#define USE_IMPROV_SERIAL_NEXT_URL #define USE_JSON #define USE_LIGHT #define USE_LOCK @@ -186,6 +187,7 @@ #define USE_ESP32_CAMERA_JPEG_ENCODER #define USE_I2C #define USE_IMPROV +#define USE_ESP32_IMPROV_NEXT_URL #define USE_MICROPHONE #define USE_PSRAM #define USE_SOCKET_IMPL_BSD_SOCKETS From 73f5d01c2dec18f675cfae7ebfad6dbe8326e305 Mon Sep 17 00:00:00 2001 From: "J. 
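The flash saving in this patch comes from only compiling the next_url handling when it is actually configured; the per-component define name follows directly from setup_improv_core() above. A small illustrative sketch of that naming (the helper here is hypothetical, not part of the codebase):

    def next_url_define(component: str) -> str:
        # e.g. "esp32_improv" -> "USE_ESP32_IMPROV_NEXT_URL"
        return f"USE_{component.upper()}_NEXT_URL"

    print(next_url_define("esp32_improv"))   # USE_ESP32_IMPROV_NEXT_URL
    print(next_url_define("improv_serial"))  # USE_IMPROV_SERIAL_NEXT_URL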
Nick Koston" Date: Mon, 20 Oct 2025 18:32:58 -1000 Subject: [PATCH 229/336] [core] Optimize automation actions memory usage with std::initializer_list (#11433) --- esphome/core/automation.h | 4 ++-- esphome/core/base_automation.h | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/esphome/core/automation.h b/esphome/core/automation.h index e156818312..0512752d50 100644 --- a/esphome/core/automation.h +++ b/esphome/core/automation.h @@ -243,7 +243,7 @@ template class ActionList { } this->actions_end_ = action; } - void add_actions(const std::vector *> &actions) { + void add_actions(const std::initializer_list *> &actions) { for (auto *action : actions) { this->add_action(action); } @@ -286,7 +286,7 @@ template class Automation { explicit Automation(Trigger *trigger) : trigger_(trigger) { this->trigger_->set_automation_parent(this); } void add_action(Action *action) { this->actions_.add_action(action); } - void add_actions(const std::vector *> &actions) { this->actions_.add_actions(actions); } + void add_actions(const std::initializer_list *> &actions) { this->actions_.add_actions(actions); } void stop() { this->actions_.stop(); } diff --git a/esphome/core/base_automation.h b/esphome/core/base_automation.h index f1248e0035..af8cde971b 100644 --- a/esphome/core/base_automation.h +++ b/esphome/core/base_automation.h @@ -194,12 +194,12 @@ template class IfAction : public Action { public: explicit IfAction(Condition *condition) : condition_(condition) {} - void add_then(const std::vector *> &actions) { + void add_then(const std::initializer_list *> &actions) { this->then_.add_actions(actions); this->then_.add_action(new LambdaAction([this](Ts... x) { this->play_next_(x...); })); } - void add_else(const std::vector *> &actions) { + void add_else(const std::initializer_list *> &actions) { this->else_.add_actions(actions); this->else_.add_action(new LambdaAction([this](Ts... x) { this->play_next_(x...); })); } @@ -240,7 +240,7 @@ template class WhileAction : public Action { public: WhileAction(Condition *condition) : condition_(condition) {} - void add_then(const std::vector *> &actions) { + void add_then(const std::initializer_list *> &actions) { this->then_.add_actions(actions); this->then_.add_action(new LambdaAction([this](Ts... x) { if (this->num_running_ > 0 && this->condition_->check_tuple(this->var_)) { @@ -287,7 +287,7 @@ template class RepeatAction : public Action { public: TEMPLATABLE_VALUE(uint32_t, count) - void add_then(const std::vector *> &actions) { + void add_then(const std::initializer_list *> &actions) { this->then_.add_actions(actions); this->then_.add_action(new LambdaAction([this](uint32_t iteration, Ts... x) { iteration++; From 9ee0e20aa8f48c6b72fca00ed4545280000fb2b7 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 19:11:16 -1000 Subject: [PATCH 230/336] [espnow] Fix compilation error with initializer_list after #11433 --- esphome/components/espnow/automation.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/esphome/components/espnow/automation.h b/esphome/components/espnow/automation.h index 2416377859..5415b088fd 100644 --- a/esphome/components/espnow/automation.h +++ b/esphome/components/espnow/automation.h @@ -14,13 +14,13 @@ template class SendAction : public Action, public Parente TEMPLATABLE_VALUE(std::vector, data); public: - void add_on_sent(const std::vector *> &actions) { + void add_on_sent(const std::initializer_list *> &actions) { this->sent_.add_actions(actions); if (this->flags_.wait_for_sent) { this->sent_.add_action(new LambdaAction([this](Ts... x) { this->play_next_(x...); })); } } - void add_on_error(const std::vector *> &actions) { + void add_on_error(const std::initializer_list *> &actions) { this->error_.add_actions(actions); if (this->flags_.wait_for_sent) { this->error_.add_action(new LambdaAction([this](Ts... x) { From cd2d3f061d78018b2b16b35adb137e1e9b47cff6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 19:58:24 -1000 Subject: [PATCH 231/336] [espnow] Fix compilation error with initializer_list after #11433 (#11436) --- esphome/components/espnow/automation.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/esphome/components/espnow/automation.h b/esphome/components/espnow/automation.h index 2416377859..5415b088fd 100644 --- a/esphome/components/espnow/automation.h +++ b/esphome/components/espnow/automation.h @@ -14,13 +14,13 @@ template class SendAction : public Action, public Parente TEMPLATABLE_VALUE(std::vector, data); public: - void add_on_sent(const std::vector *> &actions) { + void add_on_sent(const std::initializer_list *> &actions) { this->sent_.add_actions(actions); if (this->flags_.wait_for_sent) { this->sent_.add_action(new LambdaAction([this](Ts... x) { this->play_next_(x...); })); } } - void add_on_error(const std::vector *> &actions) { + void add_on_error(const std::initializer_list *> &actions) { this->error_.add_actions(actions); if (this->flags_.wait_for_sent) { this->error_.add_action(new LambdaAction([this](Ts... x) { From 226d9a4796b62c123870308211a2b049381e2243 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 20:12:51 -1000 Subject: [PATCH 232/336] more cleanup --- esphome/analyze_memory/cli.py | 27 ++- esphome/analyze_memory/const.py | 288 ++++++++++++++++++++++++-------- 2 files changed, 242 insertions(+), 73 deletions(-) diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index 1695a00c19..80edde950c 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -231,9 +231,30 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): api_component = (name, mem) break - # Combine all components to analyze: top ESPHome + all external + API if not already included - components_to_analyze = list(top_esphome_components) + list( - top_external_components + # Also include wifi_stack and other important system components if they exist + system_components_to_include = [ + "wifi_stack", + "bluetooth", + "network_stack", + "cpp_runtime", + "other", + "libc", + "phy_radio", + "mdns_lib", + "nvs", + "ota", + "arduino_core", + ] + system_components = [] + for name, mem in components: + if name in system_components_to_include: + system_components.append((name, mem)) + + # Combine all components to analyze: top ESPHome + all external + API if not already included + system components + components_to_analyze = ( + list(top_esphome_components) + + list(top_external_components) + + system_components ) if api_component and api_component not in components_to_analyze: components_to_analyze.append(api_component) diff --git a/esphome/analyze_memory/const.py b/esphome/analyze_memory/const.py index c60b70aeec..0410788fdd 100644 --- a/esphome/analyze_memory/const.py +++ b/esphome/analyze_memory/const.py @@ -127,40 +127,39 @@ SYMBOL_PATTERNS = { "tryget_socket_unconn", "cs_create_ctrl_sock", "netbuf_alloc", + "tcp_", # TCP protocol functions + "udp_", # UDP protocol functions + "lwip_", # LwIP stack functions + "eagle_lwip", # ESP-specific LwIP functions + "new_linkoutput", # Link output function + "acd_", # Address Conflict Detection (ACD) + "eth_", # Ethernet functions + "mac_enable_bb", # MAC baseband enable + "reassemble_and_dispatch", # Packet reassembly ], + # dhcp must come before libc to avoid "dhcp_select" matching "select" pattern + "dhcp": ["dhcp", "handle_dhcp"], "ipv6_stack": ["nd6_", "ip6_", "mld6_", "icmp6_", "icmp6_input"], - "wifi_stack": [ - "ieee80211", - "hostap", - "sta_", - "ap_", - "scan_", - "wifi_", - "wpa_", - "wps_", - "esp_wifi", - "cnx_", - "wpa3_", - "sae_", - "wDev_", - "ic_", - "mac_", - "esf_buf", - "gWpaSm", - "sm_WPA", - "eapol_", - "owe_", - "wifiLowLevelInit", - "s_do_mapping", - "gScanStruct", - "ppSearchTxframe", - "ppMapWaitTxq", - "ppFillAMPDUBar", - "ppCheckTxConnTrafficIdle", - "ppCalTkipMic", + # Order matters! More specific categories must come before general ones. 
+ # mdns must come before bluetooth to avoid "_mdns_disable_pcb" matching "ble_" pattern + "mdns_lib": ["mdns"], + # memory_mgmt must come before wifi_stack to catch mmu_hal_* symbols + "memory_mgmt": [ + "mem_", + "memory_", + "tlsf_", + "memp_", + "pbuf_", + "pbuf_alloc", + "pbuf_copy_partial_pbuf", + "esp_mmu_map", + "mmu_hal_", + "s_do_mapping", # Memory mapping function, not WiFi + "hash_map_", # Hash map data structure + "umm_assimilate", # UMM malloc assimilation ], - "bluetooth": ["bt_", "ble_", "l2c_", "gatt_", "gap_", "hci_", "BT_init"], - "wifi_bt_coex": ["coex"], + # Bluetooth categories must come BEFORE wifi_stack to avoid misclassification + # Many BLE symbols contain patterns like "ble_" that would otherwise match wifi patterns "bluetooth_rom": ["r_ble", "r_lld", "r_llc", "r_llm"], "bluedroid_bt": [ "bluedroid", @@ -207,6 +206,60 @@ SYMBOL_PATTERNS = { "copy_extra_byte_in_db", "parse_read_local_supported_commands_response", ], + "bluetooth": [ + "bt_", + "_ble_", # More specific than "ble_" to avoid matching "able_", "enable_", "disable_" + "l2c_", + "l2ble_", # L2CAP for BLE + "gatt_", + "gap_", + "hci_", + "btsnd_hcic_", # Bluetooth HCI command send functions + "BT_init", + "BT_tx_", # Bluetooth transmit functions + "esp_ble_", # Catch esp_ble_* functions + ], + "bluetooth_ll": [ + "llm_", # Link layer manager + "llc_", # Link layer control + "lld_", # Link layer driver + "llcp_", # Link layer control protocol + "lmp_", # Link manager protocol + ], + "wifi_bt_coex": ["coex"], + "wifi_stack": [ + "ieee80211", + "hostap", + "sta_", + "wifi_ap_", # More specific than "ap_" to avoid matching "cap_", "map_" + "wifi_scan_", # More specific than "scan_" to avoid matching "_scan_" in other contexts + "wifi_", + "wpa_", + "wps_", + "esp_wifi", + "cnx_", + "wpa3_", + "sae_", + "wDev_", + "ic_mac_", # More specific than "mac_" to avoid matching emac_ + "esf_buf", + "gWpaSm", + "sm_WPA", + "eapol_", + "owe_", + "wifiLowLevelInit", + # Removed "s_do_mapping" - this is memory management, not WiFi + "gScanStruct", + "ppSearchTxframe", + "ppMapWaitTxq", + "ppFillAMPDUBar", + "ppCheckTxConnTrafficIdle", + "ppCalTkipMic", + "phy_force_wifi", + "phy_unforce_wifi", + "write_wifi_chan", + "wifi_track_pll", + ], "crypto_math": [ "ecp_", "bignum_", @@ -231,13 +284,36 @@ SYMBOL_PATTERNS = { "p_256_init_curve", "shift_sub_rows", "rshift", + "rijndaelEncrypt", # AES Rijndael encryption + ], + # System and Arduino core functions must come before libc + "esp_system": [ + "system_", # ESP system functions + "postmortem_", # Postmortem reporting + ], + "arduino_core": [ + "pinMode", + "resetPins", + "millis", + "micros", + "delay(", # More specific - Arduino delay function with parenthesis + "delayMicroseconds", + "digitalWrite", + "digitalRead", + ], + "sntp": ["sntp_", "sntp_recv"], + "scheduler": [ + "run_scheduled_", + "compute_scheduled_", + "event_TaskQueue", ], "hw_crypto": ["esp_aes", "esp_sha", "esp_rsa", "esp_bignum", "esp_mpi"], "libc": [ "printf", "scanf", "malloc", - "free", + "_free", # More specific than "free" to match _free, __free_r, etc. but not arbitrary "free" substring + "umm_free", # UMM malloc free function "memcpy", "memset", "strcpy", @@ -259,7 +335,7 @@ SYMBOL_PATTERNS = { "_setenv_r", "_tzset_unlocked_r", "__tzcalc_limits", - "select", + "_select", # More specific than "select" to avoid matching "dhcp_select", etc. 
"scalbnf", "strtof", "strtof_l", @@ -316,8 +392,24 @@ SYMBOL_PATTERNS = { "CSWTCH$", "dst$", "sulp", + "_strtol_l", # String to long with locale + "__cvt", # Convert + "__utoa", # Unsigned to ASCII + "__global_locale", # Global locale + "_ctype_", # Character type + "impure_data", # Impure data + ], + "string_ops": [ + "strcmp", + "strncmp", + "strchr", + "strstr", + "strtok", + "strdup", + "strncasecmp_P", # String compare (case insensitive, from program memory) + "strnlen_P", # String length (from program memory) + "strncat_P", # String concatenate (from program memory) ], - "string_ops": ["strcmp", "strncmp", "strchr", "strstr", "strtok", "strdup"], "memory_alloc": ["malloc", "calloc", "realloc", "free", "_sbrk"], "file_io": [ "fread", @@ -338,10 +430,26 @@ SYMBOL_PATTERNS = { "vsscanf", ], "cpp_anonymous": ["_GLOBAL__N_", "n$"], - "cpp_runtime": ["__cxx", "_ZN", "_ZL", "_ZSt", "__gxx_personality", "_Z16"], - "exception_handling": ["__cxa_", "_Unwind_", "__gcc_personality", "uw_frame_state"], + # Plain C patterns only - C++ symbols will be categorized via DEMANGLED_PATTERNS + "nvs": ["nvs_"], # Plain C NVS functions + "ota": ["ota_", "OTA", "esp_ota", "app_desc"], + # cpp_runtime: Removed _ZN, _ZL to let DEMANGLED_PATTERNS categorize C++ symbols properly + # Only keep patterns that are truly runtime-specific and not categorizable by namespace + "cpp_runtime": ["__cxx", "_ZSt", "__gxx_personality", "_Z16"], + "exception_handling": [ + "__cxa_", + "_Unwind_", + "__gcc_personality", + "uw_frame_state", + "search_object", # Search for exception handling object + "get_cie_encoding", # Get CIE encoding + "add_fdes", # Add frame description entries + "fde_unencoded_compare", # Compare FDEs + "fde_mixed_encoding_compare", # Compare mixed encoding FDEs + "frame_downheap", # Frame heap operations + "frame_heapsort", # Frame heap sorting + ], "static_init": ["_GLOBAL__sub_I_"], - "mdns_lib": ["mdns"], "phy_radio": [ "phy_", "rf_", @@ -394,10 +502,47 @@ SYMBOL_PATTERNS = { "txcal_debuge_mode", "ant_wifitx_cfg", "reg_init_begin", + "tx_cap_init", # TX capacitance init + "ram_set_txcap", # RAM TX capacitance setting + "tx_atten_", # TX attenuation + "txiq_", # TX I/Q calibration + "ram_cal_", # RAM calibration + "ram_rxiq_", # RAM RX I/Q + "readvdd33", # Read VDD33 + "test_tout", # Test timeout + "tsen_meas", # Temperature sensor measurement + "bbpll_cal", # Baseband PLL calibration + "set_cal_", # Set calibration + "set_rfanagain_", # Set RF analog gain + "set_txdc_", # Set TX DC + "get_vdd33_", # Get VDD33 + "gen_rx_gain_table", # Generate RX gain table + "ram_ana_inf_gating_en", # RAM analog interface gating enable + "tx_cont_en", # TX continuous enable + "tx_delay_cfg", # TX delay configuration + "tx_gain_table_set", # TX gain table set + "check_and_reset_hw_deadlock", # Hardware deadlock check + "s_config", # System/hardware config + "chan14_mic_cfg", # Channel 14 MIC config + ], + "wifi_phy_pp": [ + "pp_", + "ppT", + "ppR", + "ppP", + "ppInstall", + "ppCalTxAMPDULength", + "ppCheckTx", # Packet processor TX check + "ppCal", # Packet processor calibration + "HdlAllBuffedEb", # Handle buffered EB ], - "wifi_phy_pp": ["pp_", "ppT", "ppR", "ppP", "ppInstall", "ppCalTxAMPDULength"], "wifi_lmac": ["lmac"], - "wifi_device": ["wdev", "wDev_"], + "wifi_device": [ + "wdev", + "wDev_", + "ic_set_sta", # Set station mode + "ic_set_vif", # Set virtual interface + ], "power_mgmt": [ "pm_", "sleep", @@ -406,15 +551,7 @@ SYMBOL_PATTERNS = { "deep_sleep", "power_down", "g_pm", - ], - "memory_mgmt": [ - "mem_", 
- "memory_", - "tlsf_", - "memp_", - "pbuf_", - "pbuf_alloc", - "pbuf_copy_partial_pbuf", + "pmc", # Power Management Controller ], "hal_layer": ["hal_"], "clock_mgmt": [ @@ -439,7 +576,6 @@ SYMBOL_PATTERNS = { "error_handling": ["panic", "abort", "assert", "error_", "fault"], "authentication": ["auth"], "ppp_protocol": ["ppp", "ipcp_", "lcp_", "chap_", "LcpEchoCheck"], - "dhcp": ["dhcp", "handle_dhcp"], "ethernet_phy": [ "emac_", "eth_phy_", @@ -618,7 +754,15 @@ SYMBOL_PATTERNS = { "ampdu_dispatch_upto", ], "ieee802_11": ["ieee802_11_", "ieee802_11_parse_elems"], - "rate_control": ["rssi_margin", "rcGetSched", "get_rate_fcc_index"], + "rate_control": [ + "rssi_margin", + "rcGetSched", + "get_rate_fcc_index", + "rcGetRate", # Get rate + "rc_get_", # Rate control getters + "rc_set_", # Rate control setters + "rc_enable_", # Rate control enable functions + ], "nan": ["nan_dp_", "nan_dp_post_tx", "nan_dp_delete_peer"], "channel_mgmt": ["chm_init", "chm_set_current_channel"], "trace": ["trc_init", "trc_onAmpduOp"], @@ -799,31 +943,18 @@ SYMBOL_PATTERNS = { "supports_interlaced_inquiry_scan", "supports_reading_remote_extended_features", ], - "bluetooth_ll": [ - "lld_pdu_", - "ld_acl_", - "lld_stop_ind_handler", - "lld_evt_winsize_change", - "config_lld_evt_funcs_reset", - "config_lld_funcs_reset", - "config_llm_funcs_reset", - "llm_set_long_adv_data", - "lld_retry_tx_prog", - "llc_link_sup_to_ind_handler", - "config_llc_funcs_reset", - "lld_evt_rxwin_compute", - "config_btdm_funcs_reset", - "config_ea_funcs_reset", - "llc_defalut_state_tab_reset", - "config_rwip_funcs_reset", - "ke_lmp_rx_flooding_detect", - ], } # Demangled patterns: patterns found in demangled C++ names DEMANGLED_PATTERNS = { "gpio_driver": ["GPIO"], "uart_driver": ["UART"], + # mdns_lib must come before network_stack to avoid "udp" matching "_udpReadBuffer" in MDNSResponder + "mdns_lib": [ + "MDNSResponder", + "MDNSImplementation", + "MDNS", + ], "network_stack": [ "lwip", "tcp", @@ -836,6 +967,24 @@ DEMANGLED_PATTERNS = { "ethernet", "ppp", "slip", + "UdpContext", # UDP context class + "DhcpServer", # DHCP server class + ], + "arduino_core": [ + "String::", # Arduino String class + "Print::", # Arduino Print class + "HardwareSerial::", # Serial class + "IPAddress::", # IP address class + "EspClass::", # ESP class + "experimental::_SPI", # Experimental SPI + ], + "ota": [ + "UpdaterClass", + "Updater::", + ], + "wifi": [ + "ESP8266WiFi", + "WiFi::", ], "wifi_stack": ["NetworkInterface"], "nimble_bt": [ @@ -854,7 +1003,6 @@ DEMANGLED_PATTERNS = { "rtti": ["__type_info", "__class_type_info"], "web_server_lib": ["AsyncWebServer", "AsyncWebHandler", "WebServer"], "async_tcp": ["AsyncClient", "AsyncServer"], - "mdns_lib": ["mdns"], "json_lib": [ "ArduinoJson", "JsonDocument", From b006f03080326b8bbf732cfb7dbb4385119ac2cf Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 20:13:20 -1000 Subject: [PATCH 233/336] more cleanup --- esphome/analyze_memory/const.py | 1 + 1 file changed, 1 insertion(+) diff --git a/esphome/analyze_memory/const.py b/esphome/analyze_memory/const.py index 0410788fdd..78af82059f 100644 --- a/esphome/analyze_memory/const.py +++ b/esphome/analyze_memory/const.py @@ -223,6 +223,7 @@ SYMBOL_PATTERNS = { "llm_", # Link layer manager "llc_", # Link layer control "lld_", # Link layer driver + "ld_acl_", # Link layer ACL (Asynchronous Connection-Oriented) "llcp_", # Link layer control protocol "lmp_", # Link manager protocol ], From c6370bb410b4b437d2eed7e16b8433b689bd737f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 20:15:12 -1000 Subject: [PATCH 234/336] more cleanup --- esphome/analyze_memory/cli.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index 80edde950c..e1ddd490e7 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -233,17 +233,8 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): # Also include wifi_stack and other important system components if they exist system_components_to_include = [ - "wifi_stack", - "bluetooth", - "network_stack", - "cpp_runtime", - "other", - "libc", - "phy_radio", - "mdns_lib", - "nvs", - "ota", - "arduino_core", + # Empty list - we've finished debugging symbol categorization + # Add component names here if you need to debug their symbols ] system_components = [] for name, mem in components: From bc572aeec5e61a29419d116ff75c65f5a2e3df78 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 20:21:27 -1000 Subject: [PATCH 235/336] preen --- esphome/analyze_memory/cli.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index e1ddd490e7..718f42330d 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -236,10 +236,11 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): # Empty list - we've finished debugging symbol categorization # Add component names here if you need to debug their symbols ] - system_components = [] - for name, mem in components: - if name in system_components_to_include: - system_components.append((name, mem)) + system_components = [ + (name, mem) + for name, mem in components + if name in system_components_to_include + ] # Combine all components to analyze: top ESPHome + all external + API if not already included + system components components_to_analyze = ( From 7a2887e2ed24544da3ca6510d2ee1494c4685eba Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 20:39:05 -1000 Subject: [PATCH 236/336] [analyze-memory] Improve symbol categorization accuracy (#11440) --- esphome/analyze_memory/cli.py | 19 ++- esphome/analyze_memory/const.py | 289 ++++++++++++++++++++++++-------- 2 files changed, 235 insertions(+), 73 deletions(-) diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py index 1695a00c19..718f42330d 100644 --- a/esphome/analyze_memory/cli.py +++ b/esphome/analyze_memory/cli.py @@ -231,9 +231,22 @@ class MemoryAnalyzerCLI(MemoryAnalyzer): api_component = (name, mem) break - # Combine all components to analyze: top ESPHome + all external + API if not already included - components_to_analyze = list(top_esphome_components) + list( - top_external_components + # Also include wifi_stack and other important system components if they exist + system_components_to_include = [ + # Empty list - we've finished debugging symbol categorization + # Add component names here if you need to debug their symbols + ] + system_components = [ + (name, mem) + for name, mem in components + if name in system_components_to_include + ] + + # Combine all components to analyze: top ESPHome + all external + API if not already included + system components + components_to_analyze = ( + list(top_esphome_components) + + list(top_external_components) + + system_components ) if api_component and api_component not in components_to_analyze: components_to_analyze.append(api_component) diff --git a/esphome/analyze_memory/const.py b/esphome/analyze_memory/const.py index c60b70aeec..78af82059f 100644 --- a/esphome/analyze_memory/const.py +++ b/esphome/analyze_memory/const.py @@ -127,40 +127,39 @@ SYMBOL_PATTERNS = { "tryget_socket_unconn", "cs_create_ctrl_sock", "netbuf_alloc", + "tcp_", # TCP protocol functions + "udp_", # UDP protocol functions + "lwip_", # LwIP stack functions + "eagle_lwip", # ESP-specific LwIP functions + "new_linkoutput", # Link output function + "acd_", # Address Conflict Detection (ACD) + "eth_", # Ethernet functions + "mac_enable_bb", # MAC baseband enable + "reassemble_and_dispatch", # Packet reassembly ], + # dhcp must come before libc to avoid "dhcp_select" matching "select" pattern + "dhcp": ["dhcp", "handle_dhcp"], "ipv6_stack": ["nd6_", "ip6_", "mld6_", "icmp6_", "icmp6_input"], - "wifi_stack": [ - "ieee80211", - "hostap", - "sta_", - "ap_", - "scan_", - "wifi_", - "wpa_", - "wps_", - "esp_wifi", - "cnx_", - "wpa3_", - "sae_", - "wDev_", - "ic_", - "mac_", - "esf_buf", - "gWpaSm", - "sm_WPA", - "eapol_", - "owe_", - "wifiLowLevelInit", - "s_do_mapping", - "gScanStruct", - "ppSearchTxframe", - "ppMapWaitTxq", - "ppFillAMPDUBar", - "ppCheckTxConnTrafficIdle", - "ppCalTkipMic", + # Order matters! More specific categories must come before general ones. 
+ # mdns must come before bluetooth to avoid "_mdns_disable_pcb" matching "ble_" pattern + "mdns_lib": ["mdns"], + # memory_mgmt must come before wifi_stack to catch mmu_hal_* symbols + "memory_mgmt": [ + "mem_", + "memory_", + "tlsf_", + "memp_", + "pbuf_", + "pbuf_alloc", + "pbuf_copy_partial_pbuf", + "esp_mmu_map", + "mmu_hal_", + "s_do_mapping", # Memory mapping function, not WiFi + "hash_map_", # Hash map data structure + "umm_assimilate", # UMM malloc assimilation ], - "bluetooth": ["bt_", "ble_", "l2c_", "gatt_", "gap_", "hci_", "BT_init"], - "wifi_bt_coex": ["coex"], + # Bluetooth categories must come BEFORE wifi_stack to avoid misclassification + # Many BLE symbols contain patterns like "ble_" that would otherwise match wifi patterns "bluetooth_rom": ["r_ble", "r_lld", "r_llc", "r_llm"], "bluedroid_bt": [ "bluedroid", @@ -207,6 +206,61 @@ SYMBOL_PATTERNS = { "copy_extra_byte_in_db", "parse_read_local_supported_commands_response", ], + "bluetooth": [ + "bt_", + "_ble_", # More specific than "ble_" to avoid matching "able_", "enable_", "disable_" + "l2c_", + "l2ble_", # L2CAP for BLE + "gatt_", + "gap_", + "hci_", + "btsnd_hcic_", # Bluetooth HCI command send functions + "BT_init", + "BT_tx_", # Bluetooth transmit functions + "esp_ble_", # Catch esp_ble_* functions + ], + "bluetooth_ll": [ + "llm_", # Link layer manager + "llc_", # Link layer control + "lld_", # Link layer driver + "ld_acl_", # Link layer ACL (Asynchronous Connection-Oriented) + "llcp_", # Link layer control protocol + "lmp_", # Link manager protocol + ], + "wifi_bt_coex": ["coex"], + "wifi_stack": [ + "ieee80211", + "hostap", + "sta_", + "wifi_ap_", # More specific than "ap_" to avoid matching "cap_", "map_" + "wifi_scan_", # More specific than "scan_" to avoid matching "_scan_" in other contexts + "wifi_", + "wpa_", + "wps_", + "esp_wifi", + "cnx_", + "wpa3_", + "sae_", + "wDev_", + "ic_mac_", # More specific than "mac_" to avoid matching emac_ + "esf_buf", + "gWpaSm", + "sm_WPA", + "eapol_", + "owe_", + "wifiLowLevelInit", + # Removed "s_do_mapping" - this is memory management, not WiFi + "gScanStruct", + "ppSearchTxframe", + "ppMapWaitTxq", + "ppFillAMPDUBar", + "ppCheckTxConnTrafficIdle", + "ppCalTkipMic", + "phy_force_wifi", + "phy_unforce_wifi", + "write_wifi_chan", + "wifi_track_pll", + ], "crypto_math": [ "ecp_", "bignum_", @@ -231,13 +285,36 @@ SYMBOL_PATTERNS = { "p_256_init_curve", "shift_sub_rows", "rshift", + "rijndaelEncrypt", # AES Rijndael encryption + ], + # System and Arduino core functions must come before libc + "esp_system": [ + "system_", # ESP system functions + "postmortem_", # Postmortem reporting + ], + "arduino_core": [ + "pinMode", + "resetPins", + "millis", + "micros", + "delay(", # More specific - Arduino delay function with parenthesis + "delayMicroseconds", + "digitalWrite", + "digitalRead", + ], + "sntp": ["sntp_", "sntp_recv"], + "scheduler": [ + "run_scheduled_", + "compute_scheduled_", + "event_TaskQueue", ], "hw_crypto": ["esp_aes", "esp_sha", "esp_rsa", "esp_bignum", "esp_mpi"], "libc": [ "printf", "scanf", "malloc", - "free", + "_free", # More specific than "free" to match _free, __free_r, etc. but not arbitrary "free" substring + "umm_free", # UMM malloc free function "memcpy", "memset", "strcpy", @@ -259,7 +336,7 @@ SYMBOL_PATTERNS = { "_setenv_r", "_tzset_unlocked_r", "__tzcalc_limits", - "select", + "_select", # More specific than "select" to avoid matching "dhcp_select", etc. 
"scalbnf", "strtof", "strtof_l", @@ -316,8 +393,24 @@ SYMBOL_PATTERNS = { "CSWTCH$", "dst$", "sulp", + "_strtol_l", # String to long with locale + "__cvt", # Convert + "__utoa", # Unsigned to ASCII + "__global_locale", # Global locale + "_ctype_", # Character type + "impure_data", # Impure data + ], + "string_ops": [ + "strcmp", + "strncmp", + "strchr", + "strstr", + "strtok", + "strdup", + "strncasecmp_P", # String compare (case insensitive, from program memory) + "strnlen_P", # String length (from program memory) + "strncat_P", # String concatenate (from program memory) ], - "string_ops": ["strcmp", "strncmp", "strchr", "strstr", "strtok", "strdup"], "memory_alloc": ["malloc", "calloc", "realloc", "free", "_sbrk"], "file_io": [ "fread", @@ -338,10 +431,26 @@ SYMBOL_PATTERNS = { "vsscanf", ], "cpp_anonymous": ["_GLOBAL__N_", "n$"], - "cpp_runtime": ["__cxx", "_ZN", "_ZL", "_ZSt", "__gxx_personality", "_Z16"], - "exception_handling": ["__cxa_", "_Unwind_", "__gcc_personality", "uw_frame_state"], + # Plain C patterns only - C++ symbols will be categorized via DEMANGLED_PATTERNS + "nvs": ["nvs_"], # Plain C NVS functions + "ota": ["ota_", "OTA", "esp_ota", "app_desc"], + # cpp_runtime: Removed _ZN, _ZL to let DEMANGLED_PATTERNS categorize C++ symbols properly + # Only keep patterns that are truly runtime-specific and not categorizable by namespace + "cpp_runtime": ["__cxx", "_ZSt", "__gxx_personality", "_Z16"], + "exception_handling": [ + "__cxa_", + "_Unwind_", + "__gcc_personality", + "uw_frame_state", + "search_object", # Search for exception handling object + "get_cie_encoding", # Get CIE encoding + "add_fdes", # Add frame description entries + "fde_unencoded_compare", # Compare FDEs + "fde_mixed_encoding_compare", # Compare mixed encoding FDEs + "frame_downheap", # Frame heap operations + "frame_heapsort", # Frame heap sorting + ], "static_init": ["_GLOBAL__sub_I_"], - "mdns_lib": ["mdns"], "phy_radio": [ "phy_", "rf_", @@ -394,10 +503,47 @@ SYMBOL_PATTERNS = { "txcal_debuge_mode", "ant_wifitx_cfg", "reg_init_begin", + "tx_cap_init", # TX capacitance init + "ram_set_txcap", # RAM TX capacitance setting + "tx_atten_", # TX attenuation + "txiq_", # TX I/Q calibration + "ram_cal_", # RAM calibration + "ram_rxiq_", # RAM RX I/Q + "readvdd33", # Read VDD33 + "test_tout", # Test timeout + "tsen_meas", # Temperature sensor measurement + "bbpll_cal", # Baseband PLL calibration + "set_cal_", # Set calibration + "set_rfanagain_", # Set RF analog gain + "set_txdc_", # Set TX DC + "get_vdd33_", # Get VDD33 + "gen_rx_gain_table", # Generate RX gain table + "ram_ana_inf_gating_en", # RAM analog interface gating enable + "tx_cont_en", # TX continuous enable + "tx_delay_cfg", # TX delay configuration + "tx_gain_table_set", # TX gain table set + "check_and_reset_hw_deadlock", # Hardware deadlock check + "s_config", # System/hardware config + "chan14_mic_cfg", # Channel 14 MIC config + ], + "wifi_phy_pp": [ + "pp_", + "ppT", + "ppR", + "ppP", + "ppInstall", + "ppCalTxAMPDULength", + "ppCheckTx", # Packet processor TX check + "ppCal", # Packet processor calibration + "HdlAllBuffedEb", # Handle buffered EB ], - "wifi_phy_pp": ["pp_", "ppT", "ppR", "ppP", "ppInstall", "ppCalTxAMPDULength"], "wifi_lmac": ["lmac"], - "wifi_device": ["wdev", "wDev_"], + "wifi_device": [ + "wdev", + "wDev_", + "ic_set_sta", # Set station mode + "ic_set_vif", # Set virtual interface + ], "power_mgmt": [ "pm_", "sleep", @@ -406,15 +552,7 @@ SYMBOL_PATTERNS = { "deep_sleep", "power_down", "g_pm", - ], - "memory_mgmt": [ - "mem_", 
- "memory_", - "tlsf_", - "memp_", - "pbuf_", - "pbuf_alloc", - "pbuf_copy_partial_pbuf", + "pmc", # Power Management Controller ], "hal_layer": ["hal_"], "clock_mgmt": [ @@ -439,7 +577,6 @@ SYMBOL_PATTERNS = { "error_handling": ["panic", "abort", "assert", "error_", "fault"], "authentication": ["auth"], "ppp_protocol": ["ppp", "ipcp_", "lcp_", "chap_", "LcpEchoCheck"], - "dhcp": ["dhcp", "handle_dhcp"], "ethernet_phy": [ "emac_", "eth_phy_", @@ -618,7 +755,15 @@ SYMBOL_PATTERNS = { "ampdu_dispatch_upto", ], "ieee802_11": ["ieee802_11_", "ieee802_11_parse_elems"], - "rate_control": ["rssi_margin", "rcGetSched", "get_rate_fcc_index"], + "rate_control": [ + "rssi_margin", + "rcGetSched", + "get_rate_fcc_index", + "rcGetRate", # Get rate + "rc_get_", # Rate control getters + "rc_set_", # Rate control setters + "rc_enable_", # Rate control enable functions + ], "nan": ["nan_dp_", "nan_dp_post_tx", "nan_dp_delete_peer"], "channel_mgmt": ["chm_init", "chm_set_current_channel"], "trace": ["trc_init", "trc_onAmpduOp"], @@ -799,31 +944,18 @@ SYMBOL_PATTERNS = { "supports_interlaced_inquiry_scan", "supports_reading_remote_extended_features", ], - "bluetooth_ll": [ - "lld_pdu_", - "ld_acl_", - "lld_stop_ind_handler", - "lld_evt_winsize_change", - "config_lld_evt_funcs_reset", - "config_lld_funcs_reset", - "config_llm_funcs_reset", - "llm_set_long_adv_data", - "lld_retry_tx_prog", - "llc_link_sup_to_ind_handler", - "config_llc_funcs_reset", - "lld_evt_rxwin_compute", - "config_btdm_funcs_reset", - "config_ea_funcs_reset", - "llc_defalut_state_tab_reset", - "config_rwip_funcs_reset", - "ke_lmp_rx_flooding_detect", - ], } # Demangled patterns: patterns found in demangled C++ names DEMANGLED_PATTERNS = { "gpio_driver": ["GPIO"], "uart_driver": ["UART"], + # mdns_lib must come before network_stack to avoid "udp" matching "_udpReadBuffer" in MDNSResponder + "mdns_lib": [ + "MDNSResponder", + "MDNSImplementation", + "MDNS", + ], "network_stack": [ "lwip", "tcp", @@ -836,6 +968,24 @@ DEMANGLED_PATTERNS = { "ethernet", "ppp", "slip", + "UdpContext", # UDP context class + "DhcpServer", # DHCP server class + ], + "arduino_core": [ + "String::", # Arduino String class + "Print::", # Arduino Print class + "HardwareSerial::", # Serial class + "IPAddress::", # IP address class + "EspClass::", # ESP class + "experimental::_SPI", # Experimental SPI + ], + "ota": [ + "UpdaterClass", + "Updater::", + ], + "wifi": [ + "ESP8266WiFi", + "WiFi::", ], "wifi_stack": ["NetworkInterface"], "nimble_bt": [ @@ -854,7 +1004,6 @@ DEMANGLED_PATTERNS = { "rtti": ["__type_info", "__class_type_info"], "web_server_lib": ["AsyncWebServer", "AsyncWebHandler", "WebServer"], "async_tcp": ["AsyncClient", "AsyncServer"], - "mdns_lib": ["mdns"], "json_lib": [ "ArduinoJson", "JsonDocument", From 0b2f5fcd7eec47493a3df7162fabab64080a960f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 20:39:21 -1000 Subject: [PATCH 237/336] Add additional sensor filter tests (#11438) --- tests/components/sensor/common.yaml | 63 +++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/tests/components/sensor/common.yaml b/tests/components/sensor/common.yaml index 3f81f3f9ef..2180f66da8 100644 --- a/tests/components/sensor/common.yaml +++ b/tests/components/sensor/common.yaml @@ -173,3 +173,66 @@ sensor: timeout: 1000ms value: [42.0] - multiply: 2.0 + + # CalibrateLinearFilter - piecewise linear calibration + - platform: copy + source_id: source_sensor + name: "Calibrate Linear Two Points" + filters: + - calibrate_linear: + - 0.0 -> 0.0 + - 100.0 -> 100.0 + + - platform: copy + source_id: source_sensor + name: "Calibrate Linear Multiple Segments" + filters: + - calibrate_linear: + - 0.0 -> 0.0 + - 50.0 -> 55.0 + - 100.0 -> 102.5 + + - platform: copy + source_id: source_sensor + name: "Calibrate Linear Least Squares" + filters: + - calibrate_linear: + method: least_squares + datapoints: + - 0.0 -> 0.0 + - 50.0 -> 55.0 + - 100.0 -> 102.5 + + # CalibratePolynomialFilter - polynomial calibration + - platform: copy + source_id: source_sensor + name: "Calibrate Polynomial Degree 2" + filters: + - calibrate_polynomial: + degree: 2 + datapoints: + - 0.0 -> 0.0 + - 50.0 -> 55.0 + - 100.0 -> 102.5 + + - platform: copy + source_id: source_sensor + name: "Calibrate Polynomial Degree 3" + filters: + - calibrate_polynomial: + degree: 3 + datapoints: + - 0.0 -> 0.0 + - 25.0 -> 26.0 + - 50.0 -> 55.0 + - 100.0 -> 102.5 + + # OrFilter - filter branching + - platform: copy + source_id: source_sensor + name: "Or Filter with Multiple Branches" + filters: + - or: + - multiply: 2.0 + - offset: 10.0 + - lambda: return x * 3.0; From 0ae9009e414517506d983425f2801f2731839f1b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 20:39:50 -1000 Subject: [PATCH 238/336] [ci] Fix clang-tidy split mode for core file changes (#11434) --- script/determine-jobs.py | 44 ++++-- script/helpers.py | 233 +++++++++++++++++++++++++++- script/list-components.py | 180 ++------------------- tests/script/test_determine_jobs.py | 137 ++++++++++------ 4 files changed, 358 insertions(+), 236 deletions(-) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index 0d77177e28..9721fd9756 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -43,7 +43,6 @@ from enum import StrEnum from functools import cache import json import os -from pathlib import Path import subprocess import sys from typing import Any @@ -53,10 +52,13 @@ from helpers import ( CPP_FILE_EXTENSIONS, PYTHON_FILE_EXTENSIONS, changed_files, + filter_component_files, get_all_dependencies, + get_changed_components, get_component_from_path, get_component_test_files, get_components_from_integration_fixtures, + get_components_with_dependencies, git_ls_files, parse_test_filename, root_path, @@ -561,16 +563,29 @@ def main() -> None: run_python_linters = should_run_python_linters(args.branch) changed_cpp_file_count = count_changed_cpp_files(args.branch) - # Get both directly changed and all changed components (with dependencies) in one call - script_path = Path(__file__).parent / "list-components.py" - cmd = [sys.executable, str(script_path), "--changed-with-deps"] - if args.branch: - cmd.extend(["-b", args.branch]) + # Get changed components + # get_changed_components() returns: + # None: Core files changed (need full scan) + # []: No components changed + # [list]: Changed components (already includes dependencies) + changed_components_result = get_changed_components() - result = subprocess.run(cmd, capture_output=True, text=True, check=True) - component_data = json.loads(result.stdout) - directly_changed_components = component_data["directly_changed"] - changed_components = component_data["all_changed"] + if changed_components_result is None: + # Core files changed - will trigger full clang-tidy scan + # No specific components to test + changed_components = [] + directly_changed_components = [] + is_core_change = True + else: + # Get both directly changed and all changed (with dependencies) + changed = changed_files(args.branch) + component_files = [f for f in changed if filter_component_files(f)] + + directly_changed_components = get_components_with_dependencies( + component_files, False + ) + changed_components = get_components_with_dependencies(component_files, True) + is_core_change = False # Filter to only components that have test files # Components without tests shouldn't generate CI test jobs @@ -581,11 +596,11 @@ def main() -> None: # Get directly changed components with tests (for isolated testing) # These will be tested WITHOUT --testing-mode in CI to enable full validation # (pin conflicts, etc.) 
since they contain the actual changes being reviewed - directly_changed_with_tests = [ + directly_changed_with_tests = { component for component in directly_changed_components if _component_has_tests(component) - ] + } # Get dependency-only components (for grouped testing) dependency_only_components = [ @@ -599,7 +614,8 @@ def main() -> None: # Determine clang-tidy mode based on actual files that will be checked if run_clang_tidy: - is_full_scan = _is_clang_tidy_full_scan() + # Full scan needed if: hash changed OR core files changed + is_full_scan = _is_clang_tidy_full_scan() or is_core_change if is_full_scan: # Full scan checks all files - always use split mode for efficiency @@ -638,7 +654,7 @@ def main() -> None: "python_linters": run_python_linters, "changed_components": changed_components, "changed_components_with_tests": changed_components_with_tests, - "directly_changed_components_with_tests": directly_changed_with_tests, + "directly_changed_components_with_tests": list(directly_changed_with_tests), "dependency_only_components_with_tests": dependency_only_components, "component_test_count": len(changed_components_with_tests), "directly_changed_count": len(directly_changed_with_tests), diff --git a/script/helpers.py b/script/helpers.py index edde3d78af..6b2bb2daef 100644 --- a/script/helpers.py +++ b/script/helpers.py @@ -1,5 +1,6 @@ from __future__ import annotations +from collections.abc import Callable from functools import cache import json import os @@ -7,6 +8,7 @@ import os.path from pathlib import Path import re import subprocess +import sys import time from typing import Any @@ -304,7 +306,10 @@ def get_changed_components() -> list[str] | None: for f in changed ) if core_cpp_changed: - print("Core C++/header files changed - will run full clang-tidy scan") + print( + "Core C++/header files changed - will run full clang-tidy scan", + file=sys.stderr, + ) return None # Use list-components.py to get changed components @@ -318,7 +323,10 @@ def get_changed_components() -> list[str] | None: return parse_list_components_output(result.stdout) except subprocess.CalledProcessError: # If the script fails, fall back to full scan - print("Could not determine changed components - will run full clang-tidy scan") + print( + "Could not determine changed components - will run full clang-tidy scan", + file=sys.stderr, + ) return None @@ -370,14 +378,14 @@ def _filter_changed_ci(files: list[str]) -> list[str]: if f in changed and not f.startswith(ESPHOME_COMPONENTS_PATH) ] if not files: - print("No files changed") + print("No files changed", file=sys.stderr) return files # Scenario 3: Specific components changed # Action: Check ALL files in each changed component # Convert component list to set for O(1) lookups component_set = set(components) - print(f"Changed components: {', '.join(sorted(components))}") + print(f"Changed components: {', '.join(sorted(components))}", file=sys.stderr) # The 'files' parameter contains ALL files in the codebase that clang-tidy would check. # We filter this down to only files in the changed components. @@ -648,3 +656,220 @@ def get_components_from_integration_fixtures() -> set[str]: components.add(item["platform"]) return components + + +def filter_component_files(file_path: str) -> bool: + """Check if a file path is a component file. 
+ + Args: + file_path: Path to check + + Returns: + True if the file is in a component directory + """ + return file_path.startswith("esphome/components/") or file_path.startswith( + "tests/components/" + ) + + +def extract_component_names_from_files(files: list[str]) -> list[str]: + """Extract unique component names from a list of file paths. + + Args: + files: List of file paths + + Returns: + List of unique component names (preserves order) + """ + return list( + dict.fromkeys(comp for file in files if (comp := get_component_from_path(file))) + ) + + +def add_item_to_components_graph( + components_graph: dict[str, list[str]], parent: str, child: str +) -> None: + """Add a dependency relationship to the components graph. + + Args: + components_graph: Graph mapping parent components to their children + parent: Parent component name + child: Child component name (dependent) + """ + if not parent.startswith("__") and parent != child: + if parent not in components_graph: + components_graph[parent] = [] + if child not in components_graph[parent]: + components_graph[parent].append(child) + + +def resolve_auto_load( + auto_load: list[str] | Callable[[], list[str]] | Callable[[dict | None], list[str]], + config: dict | None = None, +) -> list[str]: + """Resolve AUTO_LOAD to a list, handling callables with or without config parameter. + + Args: + auto_load: The AUTO_LOAD value (list or callable) + config: Optional config to pass to callable AUTO_LOAD functions + + Returns: + List of component names to auto-load + """ + if not callable(auto_load): + return auto_load + + import inspect + + if inspect.signature(auto_load).parameters: + return auto_load(config) + return auto_load() + + +def create_components_graph() -> dict[str, list[str]]: + """Create a graph of component dependencies. + + Returns: + Dictionary mapping parent components to their children (dependencies) + """ + from pathlib import Path + + from esphome import const + from esphome.core import CORE + from esphome.loader import ComponentManifest, get_component, get_platform + + # The root directory of the repo + root = Path(__file__).parent.parent + components_dir = root / "esphome" / "components" + # Fake some directory so that get_component works + CORE.config_path = root + # Various configuration to capture different outcomes used by `AUTO_LOAD` function. + KEY_CORE = const.KEY_CORE + KEY_TARGET_FRAMEWORK = const.KEY_TARGET_FRAMEWORK + KEY_TARGET_PLATFORM = const.KEY_TARGET_PLATFORM + PLATFORM_ESP32 = const.PLATFORM_ESP32 + PLATFORM_ESP8266 = const.PLATFORM_ESP8266 + + TARGET_CONFIGURATIONS = [ + {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None}, + {KEY_TARGET_FRAMEWORK: "arduino", KEY_TARGET_PLATFORM: None}, + {KEY_TARGET_FRAMEWORK: "esp-idf", KEY_TARGET_PLATFORM: None}, + {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP32}, + {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP8266}, + ] + CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0] + + components_graph = {} + platforms = [] + components: list[tuple[ComponentManifest, str, Path]] = [] + + for path in components_dir.iterdir(): + if not path.is_dir(): + continue + if not (path / "__init__.py").is_file(): + continue + name = path.name + comp = get_component(name) + if comp is None: + raise RuntimeError( + f"Cannot find component {name}. 
Make sure current path is pip installed ESPHome" + ) + + components.append((comp, name, path)) + if comp.is_platform_component: + platforms.append(name) + + platforms = set(platforms) + + for comp, name, path in components: + for dependency in comp.dependencies: + add_item_to_components_graph( + components_graph, dependency.split(".")[0], name + ) + + for target_config in TARGET_CONFIGURATIONS: + CORE.data[KEY_CORE] = target_config + for item in resolve_auto_load(comp.auto_load, config=None): + add_item_to_components_graph(components_graph, item, name) + # restore config + CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0] + + for platform_path in path.iterdir(): + platform_name = platform_path.stem + if platform_name == name or platform_name not in platforms: + continue + platform = get_platform(platform_name, name) + if platform is None: + continue + + add_item_to_components_graph(components_graph, platform_name, name) + + for dependency in platform.dependencies: + add_item_to_components_graph( + components_graph, dependency.split(".")[0], name + ) + + for target_config in TARGET_CONFIGURATIONS: + CORE.data[KEY_CORE] = target_config + for item in resolve_auto_load(platform.auto_load, config={}): + add_item_to_components_graph(components_graph, item, name) + # restore config + CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0] + + return components_graph + + +def find_children_of_component( + components_graph: dict[str, list[str]], component_name: str, depth: int = 0 +) -> list[str]: + """Find all components that depend on the given component (recursively). + + Args: + components_graph: Graph mapping parent components to their children + component_name: Component name to find children for + depth: Current recursion depth (max 10) + + Returns: + List of all dependent component names (may contain duplicates removed at end) + """ + if component_name not in components_graph: + return [] + + children = [] + + for child in components_graph[component_name]: + children.append(child) + if depth < 10: + children.extend( + find_children_of_component(components_graph, child, depth + 1) + ) + # Remove duplicate values + return list(set(children)) + + +def get_components_with_dependencies( + files: list[str], get_dependencies: bool = False +) -> list[str]: + """Get component names from files, optionally including their dependencies. 
+ + Args: + files: List of file paths + get_dependencies: If True, include all dependent components + + Returns: + Sorted list of component names + """ + components = extract_component_names_from_files(files) + + if get_dependencies: + components_graph = create_components_graph() + + all_components = components.copy() + for c in components: + all_components.extend(find_children_of_component(components_graph, c)) + # Remove duplicate values + all_changed_components = list(set(all_components)) + + return sorted(all_changed_components) + + return sorted(components) diff --git a/script/list-components.py b/script/list-components.py index 11533ceb30..d768256c71 100755 --- a/script/list-components.py +++ b/script/list-components.py @@ -1,24 +1,12 @@ #!/usr/bin/env python3 import argparse -from collections.abc import Callable -from pathlib import Path -import sys -from helpers import changed_files, get_component_from_path, git_ls_files - -from esphome.const import ( - KEY_CORE, - KEY_TARGET_FRAMEWORK, - KEY_TARGET_PLATFORM, - PLATFORM_ESP32, - PLATFORM_ESP8266, +from helpers import ( + changed_files, + filter_component_files, + get_components_with_dependencies, + git_ls_files, ) -from esphome.core import CORE -from esphome.loader import ComponentManifest, get_component, get_platform - - -def filter_component_files(str): - return str.startswith("esphome/components/") | str.startswith("tests/components/") def get_all_component_files() -> list[str]: @@ -27,156 +15,6 @@ def get_all_component_files() -> list[str]: return list(filter(filter_component_files, files)) -def extract_component_names_array_from_files_array(files): - components = [] - for file in files: - component_name = get_component_from_path(file) - if component_name and component_name not in components: - components.append(component_name) - return components - - -def add_item_to_components_graph(components_graph, parent, child): - if not parent.startswith("__") and parent != child: - if parent not in components_graph: - components_graph[parent] = [] - if child not in components_graph[parent]: - components_graph[parent].append(child) - - -def resolve_auto_load( - auto_load: list[str] | Callable[[], list[str]] | Callable[[dict | None], list[str]], - config: dict | None = None, -) -> list[str]: - """Resolve AUTO_LOAD to a list, handling callables with or without config parameter. - - Args: - auto_load: The AUTO_LOAD value (list or callable) - config: Optional config to pass to callable AUTO_LOAD functions - - Returns: - List of component names to auto-load - """ - if not callable(auto_load): - return auto_load - - import inspect - - if inspect.signature(auto_load).parameters: - return auto_load(config) - return auto_load() - - -def create_components_graph(): - # The root directory of the repo - root = Path(__file__).parent.parent - components_dir = root / "esphome" / "components" - # Fake some directory so that get_component works - CORE.config_path = root - # Various configuration to capture different outcomes used by `AUTO_LOAD` function. 
- TARGET_CONFIGURATIONS = [ - {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None}, - {KEY_TARGET_FRAMEWORK: "arduino", KEY_TARGET_PLATFORM: None}, - {KEY_TARGET_FRAMEWORK: "esp-idf", KEY_TARGET_PLATFORM: None}, - {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP32}, - {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP8266}, - ] - CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0] - - components_graph = {} - platforms = [] - components: list[tuple[ComponentManifest, str, Path]] = [] - - for path in components_dir.iterdir(): - if not path.is_dir(): - continue - if not (path / "__init__.py").is_file(): - continue - name = path.name - comp = get_component(name) - if comp is None: - print( - f"Cannot find component {name}. Make sure current path is pip installed ESPHome" - ) - sys.exit(1) - - components.append((comp, name, path)) - if comp.is_platform_component: - platforms.append(name) - - platforms = set(platforms) - - for comp, name, path in components: - for dependency in comp.dependencies: - add_item_to_components_graph( - components_graph, dependency.split(".")[0], name - ) - - for target_config in TARGET_CONFIGURATIONS: - CORE.data[KEY_CORE] = target_config - for item in resolve_auto_load(comp.auto_load, config=None): - add_item_to_components_graph(components_graph, item, name) - # restore config - CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0] - - for platform_path in path.iterdir(): - platform_name = platform_path.stem - if platform_name == name or platform_name not in platforms: - continue - platform = get_platform(platform_name, name) - if platform is None: - continue - - add_item_to_components_graph(components_graph, platform_name, name) - - for dependency in platform.dependencies: - add_item_to_components_graph( - components_graph, dependency.split(".")[0], name - ) - - for target_config in TARGET_CONFIGURATIONS: - CORE.data[KEY_CORE] = target_config - for item in resolve_auto_load(platform.auto_load, config={}): - add_item_to_components_graph(components_graph, item, name) - # restore config - CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0] - - return components_graph - - -def find_children_of_component(components_graph, component_name, depth=0): - if component_name not in components_graph: - return [] - - children = [] - - for child in components_graph[component_name]: - children.append(child) - if depth < 10: - children.extend( - find_children_of_component(components_graph, child, depth + 1) - ) - # Remove duplicate values - return list(set(children)) - - -def get_components(files: list[str], get_dependencies: bool = False): - components = extract_component_names_array_from_files_array(files) - - if get_dependencies: - components_graph = create_components_graph() - - all_components = components.copy() - for c in components: - all_components.extend(find_children_of_component(components_graph, c)) - # Remove duplicate values - all_changed_components = list(set(all_components)) - - return sorted(all_changed_components) - - return sorted(components) - - def main(): parser = argparse.ArgumentParser() parser.add_argument( @@ -251,8 +89,8 @@ def main(): # Return JSON with both directly changed and all changed components import json - directly_changed = get_components(files, False) - all_changed = get_components(files, True) + directly_changed = get_components_with_dependencies(files, False) + all_changed = get_components_with_dependencies(files, True) output = { "directly_changed": directly_changed, "all_changed": all_changed, @@ -260,11 +98,11 @@ def main(): 
print(json.dumps(output)) elif args.changed_direct: # Return only directly changed components (without dependencies) - for c in get_components(files, False): + for c in get_components_with_dependencies(files, False): print(c) else: # Return all changed components (with dependencies) - default behavior - for c in get_components(files, args.changed): + for c in get_components_with_dependencies(files, args.changed): print(c) diff --git a/tests/script/test_determine_jobs.py b/tests/script/test_determine_jobs.py index 44aea73990..35652e0efc 100644 --- a/tests/script/test_determine_jobs.py +++ b/tests/script/test_determine_jobs.py @@ -96,17 +96,34 @@ def test_main_all_tests_should_run( mock_should_run_clang_format.return_value = True mock_should_run_python_linters.return_value = True - # Mock list-components.py output (now returns JSON with --changed-with-deps) - mock_result = Mock() - mock_result.stdout = json.dumps( - {"directly_changed": ["wifi", "api"], "all_changed": ["wifi", "api", "sensor"]} - ) - mock_subprocess_run.return_value = mock_result + # Mock changed_files to return non-component files (to avoid memory impact) + # Memory impact only runs when component C++ files change + mock_changed_files.return_value = [ + "esphome/config.py", + "esphome/helpers.py", + ] # Run main function with mocked argv with ( patch("sys.argv", ["determine-jobs.py"]), patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False), + patch.object( + determine_jobs, + "get_changed_components", + return_value=["wifi", "api", "sensor"], + ), + patch.object( + determine_jobs, + "filter_component_files", + side_effect=lambda f: f.startswith("esphome/components/"), + ), + patch.object( + determine_jobs, + "get_components_with_dependencies", + side_effect=lambda files, deps: ["wifi", "api"] + if not deps + else ["wifi", "api", "sensor"], + ), ): determine_jobs.main() @@ -130,9 +147,9 @@ def test_main_all_tests_should_run( # changed_cpp_file_count should be present assert "changed_cpp_file_count" in output assert isinstance(output["changed_cpp_file_count"], int) - # memory_impact should be present + # memory_impact should be false (no component C++ files changed) assert "memory_impact" in output - assert output["memory_impact"]["should_run"] == "false" # No files changed + assert output["memory_impact"]["should_run"] == "false" def test_main_no_tests_should_run( @@ -154,13 +171,18 @@ def test_main_no_tests_should_run( mock_should_run_clang_format.return_value = False mock_should_run_python_linters.return_value = False - # Mock empty list-components.py output - mock_result = Mock() - mock_result.stdout = json.dumps({"directly_changed": [], "all_changed": []}) - mock_subprocess_run.return_value = mock_result + # Mock changed_files to return no component files + mock_changed_files.return_value = [] # Run main function with mocked argv - with patch("sys.argv", ["determine-jobs.py"]): + with ( + patch("sys.argv", ["determine-jobs.py"]), + patch.object(determine_jobs, "get_changed_components", return_value=[]), + patch.object(determine_jobs, "filter_component_files", return_value=False), + patch.object( + determine_jobs, "get_components_with_dependencies", return_value=[] + ), + ): determine_jobs.main() # Check output @@ -226,16 +248,22 @@ def test_main_with_branch_argument( mock_should_run_clang_format.return_value = False mock_should_run_python_linters.return_value = True - # Mock list-components.py output - mock_result = Mock() - mock_result.stdout = json.dumps( - {"directly_changed": ["mqtt"], 
"all_changed": ["mqtt"]} - ) - mock_subprocess_run.return_value = mock_result + # Mock changed_files to return non-component files (to avoid memory impact) + # Memory impact only runs when component C++ files change + mock_changed_files.return_value = ["esphome/config.py"] with ( patch("sys.argv", ["script.py", "-b", "main"]), patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False), + patch.object(determine_jobs, "get_changed_components", return_value=["mqtt"]), + patch.object( + determine_jobs, + "filter_component_files", + side_effect=lambda f: f.startswith("esphome/components/"), + ), + patch.object( + determine_jobs, "get_components_with_dependencies", return_value=["mqtt"] + ), ): determine_jobs.main() @@ -245,13 +273,6 @@ def test_main_with_branch_argument( mock_should_run_clang_format.assert_called_once_with("main") mock_should_run_python_linters.assert_called_once_with("main") - # Check that list-components.py was called with branch - mock_subprocess_run.assert_called_once() - call_args = mock_subprocess_run.call_args[0][0] - assert "--changed-with-deps" in call_args - assert "-b" in call_args - assert "main" in call_args - # Check output captured = capsys.readouterr() output = json.loads(captured.out) @@ -272,7 +293,7 @@ def test_main_with_branch_argument( # changed_cpp_file_count should be present assert "changed_cpp_file_count" in output assert isinstance(output["changed_cpp_file_count"], int) - # memory_impact should be present + # memory_impact should be false (no component C++ files changed) assert "memory_impact" in output assert output["memory_impact"]["should_run"] == "false" @@ -500,16 +521,11 @@ def test_main_filters_components_without_tests( mock_should_run_clang_format.return_value = False mock_should_run_python_linters.return_value = False - # Mock list-components.py output with 3 components - # wifi: has tests, sensor: has tests, airthings_ble: no tests - mock_result = Mock() - mock_result.stdout = json.dumps( - { - "directly_changed": ["wifi", "sensor"], - "all_changed": ["wifi", "sensor", "airthings_ble"], - } - ) - mock_subprocess_run.return_value = mock_result + # Mock changed_files to return component files + mock_changed_files.return_value = [ + "esphome/components/wifi/wifi.cpp", + "esphome/components/sensor/sensor.h", + ] # Create test directory structure tests_dir = tmp_path / "tests" / "components" @@ -533,6 +549,23 @@ def test_main_filters_components_without_tests( patch.object(determine_jobs, "root_path", str(tmp_path)), patch.object(helpers, "root_path", str(tmp_path)), patch("sys.argv", ["determine-jobs.py"]), + patch.object( + determine_jobs, + "get_changed_components", + return_value=["wifi", "sensor", "airthings_ble"], + ), + patch.object( + determine_jobs, + "filter_component_files", + side_effect=lambda f: f.startswith("esphome/components/"), + ), + patch.object( + determine_jobs, + "get_components_with_dependencies", + side_effect=lambda files, deps: ["wifi", "sensor"] + if not deps + else ["wifi", "sensor", "airthings_ble"], + ), ): # Clear the cache since we're mocking root_path determine_jobs._component_has_tests.cache_clear() @@ -788,15 +821,18 @@ def test_clang_tidy_mode_full_scan( mock_should_run_clang_format.return_value = False mock_should_run_python_linters.return_value = False - # Mock list-components.py output - mock_result = Mock() - mock_result.stdout = json.dumps({"directly_changed": [], "all_changed": []}) - mock_subprocess_run.return_value = mock_result + # Mock changed_files to return no component files + 
mock_changed_files.return_value = [] # Mock full scan (hash changed) with ( patch("sys.argv", ["determine-jobs.py"]), patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=True), + patch.object(determine_jobs, "get_changed_components", return_value=[]), + patch.object(determine_jobs, "filter_component_files", return_value=False), + patch.object( + determine_jobs, "get_components_with_dependencies", return_value=[] + ), ): determine_jobs.main() @@ -853,12 +889,10 @@ def test_clang_tidy_mode_targeted_scan( # Create component names components = [f"comp{i}" for i in range(component_count)] - # Mock list-components.py output - mock_result = Mock() - mock_result.stdout = json.dumps( - {"directly_changed": components, "all_changed": components} - ) - mock_subprocess_run.return_value = mock_result + # Mock changed_files to return component files + mock_changed_files.return_value = [ + f"esphome/components/{comp}/file.cpp" for comp in components + ] # Mock git_ls_files to return files for each component cpp_files = { @@ -875,6 +909,15 @@ def test_clang_tidy_mode_targeted_scan( patch("sys.argv", ["determine-jobs.py"]), patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False), patch.object(determine_jobs, "git_ls_files", side_effect=mock_git_ls_files), + patch.object(determine_jobs, "get_changed_components", return_value=components), + patch.object( + determine_jobs, + "filter_component_files", + side_effect=lambda f: f.startswith("esphome/components/"), + ), + patch.object( + determine_jobs, "get_components_with_dependencies", return_value=components + ), ): determine_jobs.main() From 572af76beee697ab2e807a467bc85779f510aa70 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 20:49:12 -1000 Subject: [PATCH 239/336] [esp32] Add advanced options to disable unused VFS features (saves ~5 KB flash) --- esphome/components/esp32/__init__.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/esphome/components/esp32/__init__.py b/esphome/components/esp32/__init__.py index 99a87e06f9..a30e4fd1b7 100644 --- a/esphome/components/esp32/__init__.py +++ b/esphome/components/esp32/__init__.py @@ -550,6 +550,8 @@ CONF_ENABLE_LWIP_BRIDGE_INTERFACE = "enable_lwip_bridge_interface" CONF_ENABLE_LWIP_TCPIP_CORE_LOCKING = "enable_lwip_tcpip_core_locking" CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY = "enable_lwip_check_thread_safety" CONF_DISABLE_LIBC_LOCKS_IN_IRAM = "disable_libc_locks_in_iram" +CONF_DISABLE_VFS_SUPPORT_TERMIOS = "disable_vfs_support_termios" +CONF_DISABLE_VFS_SUPPORT_SELECT = "disable_vfs_support_select" def _validate_idf_component(config: ConfigType) -> ConfigType: @@ -615,6 +617,12 @@ FRAMEWORK_SCHEMA = cv.All( cv.Optional( CONF_DISABLE_LIBC_LOCKS_IN_IRAM, default=True ): cv.boolean, + cv.Optional( + CONF_DISABLE_VFS_SUPPORT_TERMIOS, default=True + ): cv.boolean, + cv.Optional( + CONF_DISABLE_VFS_SUPPORT_SELECT, default=True + ): cv.boolean, cv.Optional(CONF_EXECUTE_FROM_PSRAM): cv.boolean, } ), @@ -962,6 +970,23 @@ async def to_code(config): if advanced.get(CONF_DISABLE_LIBC_LOCKS_IN_IRAM, True): add_idf_sdkconfig_option("CONFIG_LIBC_LOCKS_PLACE_IN_IRAM", False) + # Disable VFS support for termios (terminal I/O functions) + # ESPHome doesn't use termios functions on ESP32 (only used in host UART driver). + # Saves approximately 1.8KB of flash when disabled (default). 
+ add_idf_sdkconfig_option( + "CONFIG_VFS_SUPPORT_TERMIOS", + not advanced.get(CONF_DISABLE_VFS_SUPPORT_TERMIOS, True), + ) + + # Disable VFS support for select() with file descriptors + # ESPHome only uses select() with sockets via lwip_select(), which still works. + # VFS select is only needed for UART/eventfd file descriptors, which ESPHome doesn't use. + # Saves approximately 2.7KB of flash when disabled (default). + add_idf_sdkconfig_option( + "CONFIG_VFS_SUPPORT_SELECT", + not advanced.get(CONF_DISABLE_VFS_SUPPORT_SELECT, True), + ) + cg.add_platformio_option("board_build.partitions", "partitions.csv") if CONF_PARTITIONS in config: add_extra_build_file( From c3fbfca8446f1d746d01f3ed50b7132d0dbb1d21 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 21:15:23 -1000 Subject: [PATCH 240/336] conditional --- esphome/components/esp32/__init__.py | 57 +++++++++++++++++++++-- esphome/components/openthread/__init__.py | 4 ++ 2 files changed, 56 insertions(+), 5 deletions(-) diff --git a/esphome/components/esp32/__init__.py b/esphome/components/esp32/__init__.py index a30e4fd1b7..ef1ed18597 100644 --- a/esphome/components/esp32/__init__.py +++ b/esphome/components/esp32/__init__.py @@ -552,6 +552,30 @@ CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY = "enable_lwip_check_thread_safety" CONF_DISABLE_LIBC_LOCKS_IN_IRAM = "disable_libc_locks_in_iram" CONF_DISABLE_VFS_SUPPORT_TERMIOS = "disable_vfs_support_termios" CONF_DISABLE_VFS_SUPPORT_SELECT = "disable_vfs_support_select" +CONF_DISABLE_VFS_SUPPORT_DIR = "disable_vfs_support_dir" + +# VFS requirement tracking +# Components that need VFS features can call require_vfs_select() or require_vfs_dir() +KEY_VFS_SELECT_REQUIRED = "vfs_select_required" +KEY_VFS_DIR_REQUIRED = "vfs_dir_required" + + +def require_vfs_select() -> None: + """Mark that VFS select support is required by a component. + + Call this from components that use esp_vfs_eventfd or other VFS select features. + This prevents CONFIG_VFS_SUPPORT_SELECT from being disabled. + """ + CORE.data[KEY_VFS_SELECT_REQUIRED] = True + + +def require_vfs_dir() -> None: + """Mark that VFS directory support is required by a component. + + Call this from components that use directory functions (opendir, readdir, mkdir, etc.). + This prevents CONFIG_VFS_SUPPORT_DIR from being disabled. + """ + CORE.data[KEY_VFS_DIR_REQUIRED] = True def _validate_idf_component(config: ConfigType) -> ConfigType: @@ -623,6 +647,7 @@ FRAMEWORK_SCHEMA = cv.All( cv.Optional( CONF_DISABLE_VFS_SUPPORT_SELECT, default=True ): cv.boolean, + cv.Optional(CONF_DISABLE_VFS_SUPPORT_DIR, default=True): cv.boolean, cv.Optional(CONF_EXECUTE_FROM_PSRAM): cv.boolean, } ), @@ -980,12 +1005,34 @@ async def to_code(config): # Disable VFS support for select() with file descriptors # ESPHome only uses select() with sockets via lwip_select(), which still works. - # VFS select is only needed for UART/eventfd file descriptors, which ESPHome doesn't use. + # VFS select is only needed for UART/eventfd file descriptors. + # Components that need it (e.g., openthread) call require_vfs_select(). # Saves approximately 2.7KB of flash when disabled (default). 
- add_idf_sdkconfig_option( - "CONFIG_VFS_SUPPORT_SELECT", - not advanced.get(CONF_DISABLE_VFS_SUPPORT_SELECT, True), - ) + vfs_select_required = CORE.data.get(KEY_VFS_SELECT_REQUIRED, False) + if vfs_select_required: + # Component requires VFS select - force enable regardless of user setting + add_idf_sdkconfig_option("CONFIG_VFS_SUPPORT_SELECT", True) + else: + # No component needs it - allow user to control (default: disabled) + add_idf_sdkconfig_option( + "CONFIG_VFS_SUPPORT_SELECT", + not advanced.get(CONF_DISABLE_VFS_SUPPORT_SELECT, True), + ) + + # Disable VFS support for directory functions (opendir, readdir, mkdir, etc.) + # ESPHome doesn't use directory functions on ESP32. + # Components that need it (e.g., storage components) call require_vfs_dir(). + # Saves approximately 0.5KB+ of flash when disabled (default). + vfs_dir_required = CORE.data.get(KEY_VFS_DIR_REQUIRED, False) + if vfs_dir_required: + # Component requires VFS directory support - force enable regardless of user setting + add_idf_sdkconfig_option("CONFIG_VFS_SUPPORT_DIR", True) + else: + # No component needs it - allow user to control (default: disabled) + add_idf_sdkconfig_option( + "CONFIG_VFS_SUPPORT_DIR", + not advanced.get(CONF_DISABLE_VFS_SUPPORT_DIR, True), + ) cg.add_platformio_option("board_build.partitions", "partitions.csv") if CONF_PARTITIONS in config: diff --git a/esphome/components/openthread/__init__.py b/esphome/components/openthread/__init__.py index 3fac497c3d..5277455eca 100644 --- a/esphome/components/openthread/__init__.py +++ b/esphome/components/openthread/__init__.py @@ -4,6 +4,7 @@ from esphome.components.esp32 import ( VARIANT_ESP32H2, add_idf_sdkconfig_option, only_on_variant, + require_vfs_select, ) from esphome.components.mdns import MDNSComponent, enable_mdns_storage import esphome.config_validation as cv @@ -141,6 +142,9 @@ FINAL_VALIDATE_SCHEMA = _final_validate async def to_code(config): cg.add_define("USE_OPENTHREAD") + # OpenThread uses esp_vfs_eventfd which requires VFS select support + require_vfs_select() + # OpenThread SRP needs access to mDNS services after setup enable_mdns_storage() From abcb2ce4e73319547714d2133dfb8afa3b76fb0d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 21:17:48 -1000 Subject: [PATCH 241/336] conditional --- esphome/components/esp32/__init__.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/esphome/components/esp32/__init__.py b/esphome/components/esp32/__init__.py index ef1ed18597..cb6354cc74 100644 --- a/esphome/components/esp32/__init__.py +++ b/esphome/components/esp32/__init__.py @@ -1008,8 +1008,7 @@ async def to_code(config): # VFS select is only needed for UART/eventfd file descriptors. # Components that need it (e.g., openthread) call require_vfs_select(). # Saves approximately 2.7KB of flash when disabled (default). - vfs_select_required = CORE.data.get(KEY_VFS_SELECT_REQUIRED, False) - if vfs_select_required: + if CORE.data.get(KEY_VFS_SELECT_REQUIRED, False): # Component requires VFS select - force enable regardless of user setting add_idf_sdkconfig_option("CONFIG_VFS_SUPPORT_SELECT", True) else: @@ -1023,8 +1022,7 @@ async def to_code(config): # ESPHome doesn't use directory functions on ESP32. # Components that need it (e.g., storage components) call require_vfs_dir(). # Saves approximately 0.5KB+ of flash when disabled (default). 
- vfs_dir_required = CORE.data.get(KEY_VFS_DIR_REQUIRED, False) - if vfs_dir_required: + if CORE.data.get(KEY_VFS_DIR_REQUIRED, False): # Component requires VFS directory support - force enable regardless of user setting add_idf_sdkconfig_option("CONFIG_VFS_SUPPORT_DIR", True) else: From 66afe4a9be8fa379c8c33b2be7ffcfab6992eec9 Mon Sep 17 00:00:00 2001 From: Keith Burzinski Date: Tue, 21 Oct 2025 02:26:18 -0500 Subject: [PATCH 242/336] [climate] Add some integration tests (#11439) --- .../host_mode_climate_basic_state.yaml | 112 ++++++++++++++++++ .../fixtures/host_mode_climate_control.yaml | 108 +++++++++++++++++ .../fixtures/host_mode_many_entities.yaml | 36 +++++- .../test_host_mode_climate_basic_state.py | 49 ++++++++ .../test_host_mode_climate_control.py | 76 ++++++++++++ .../test_host_mode_many_entities.py | 43 +++++++ 6 files changed, 423 insertions(+), 1 deletion(-) create mode 100644 tests/integration/fixtures/host_mode_climate_basic_state.yaml create mode 100644 tests/integration/fixtures/host_mode_climate_control.yaml create mode 100644 tests/integration/test_host_mode_climate_basic_state.py create mode 100644 tests/integration/test_host_mode_climate_control.py diff --git a/tests/integration/fixtures/host_mode_climate_basic_state.yaml b/tests/integration/fixtures/host_mode_climate_basic_state.yaml new file mode 100644 index 0000000000..f79d684fc6 --- /dev/null +++ b/tests/integration/fixtures/host_mode_climate_basic_state.yaml @@ -0,0 +1,112 @@ +esphome: + name: host-climate-test +host: +api: +logger: + +climate: + - platform: thermostat + id: dual_mode_thermostat + name: Dual-mode Thermostat + sensor: host_thermostat_temperature_sensor + humidity_sensor: host_thermostat_humidity_sensor + humidity_hysteresis: 1.0 + min_cooling_off_time: 20s + min_cooling_run_time: 20s + max_cooling_run_time: 30s + supplemental_cooling_delta: 3.0 + min_heating_off_time: 20s + min_heating_run_time: 20s + max_heating_run_time: 30s + supplemental_heating_delta: 3.0 + min_fanning_off_time: 20s + min_fanning_run_time: 20s + min_idle_time: 10s + visual: + min_humidity: 20% + max_humidity: 70% + min_temperature: 15.0 + max_temperature: 32.0 + temperature_step: 0.1 + default_preset: home + preset: + - name: "away" + default_target_temperature_low: 18.0 + default_target_temperature_high: 24.0 + - name: "home" + default_target_temperature_low: 18.0 + default_target_temperature_high: 24.0 + auto_mode: + - logger.log: "AUTO mode set" + heat_cool_mode: + - logger.log: "HEAT_COOL mode set" + cool_action: + - switch.turn_on: air_cond + supplemental_cooling_action: + - switch.turn_on: air_cond_2 + heat_action: + - switch.turn_on: heater + supplemental_heating_action: + - switch.turn_on: heater_2 + dry_action: + - switch.turn_on: air_cond + fan_only_action: + - switch.turn_on: fan_only + idle_action: + - switch.turn_off: air_cond + - switch.turn_off: air_cond_2 + - switch.turn_off: heater + - switch.turn_off: heater_2 + - switch.turn_off: fan_only + humidity_control_humidify_action: + - switch.turn_on: humidifier + humidity_control_off_action: + - switch.turn_off: humidifier + +sensor: + - platform: template + id: host_thermostat_humidity_sensor + unit_of_measurement: °C + accuracy_decimals: 2 + state_class: measurement + force_update: true + lambda: return 42.0; + update_interval: 0.1s + - platform: template + id: host_thermostat_temperature_sensor + unit_of_measurement: °C + accuracy_decimals: 2 + state_class: measurement + force_update: true + lambda: return 22.0; + update_interval: 0.1s + +switch: + - 
platform: template + id: air_cond + name: Air Conditioner + optimistic: true + - platform: template + id: air_cond_2 + name: Air Conditioner 2 + optimistic: true + - platform: template + id: fan_only + name: Fan + optimistic: true + - platform: template + id: heater + name: Heater + optimistic: true + - platform: template + id: heater_2 + name: Heater 2 + optimistic: true + - platform: template + id: dehumidifier + name: Dehumidifier + optimistic: true + - platform: template + id: humidifier + name: Humidifier + optimistic: true diff --git a/tests/integration/fixtures/host_mode_climate_control.yaml b/tests/integration/fixtures/host_mode_climate_control.yaml new file mode 100644 index 0000000000..c60e0597a2 --- /dev/null +++ b/tests/integration/fixtures/host_mode_climate_control.yaml @@ -0,0 +1,108 @@ +esphome: + name: host-climate-test +host: +api: +logger: + +climate: + - platform: thermostat + id: dual_mode_thermostat + name: Dual-mode Thermostat + sensor: host_thermostat_temperature_sensor + humidity_sensor: host_thermostat_humidity_sensor + humidity_hysteresis: 1.0 + min_cooling_off_time: 20s + min_cooling_run_time: 20s + max_cooling_run_time: 30s + supplemental_cooling_delta: 3.0 + min_heating_off_time: 20s + min_heating_run_time: 20s + max_heating_run_time: 30s + supplemental_heating_delta: 3.0 + min_fanning_off_time: 20s + min_fanning_run_time: 20s + min_idle_time: 10s + visual: + min_humidity: 20% + max_humidity: 70% + min_temperature: 15.0 + max_temperature: 32.0 + temperature_step: 0.1 + default_preset: home + preset: + - name: "away" + default_target_temperature_low: 18.0 + default_target_temperature_high: 24.0 + - name: "home" + default_target_temperature_low: 18.0 + default_target_temperature_high: 24.0 + auto_mode: + - logger.log: "AUTO mode set" + heat_cool_mode: + - logger.log: "HEAT_COOL mode set" + cool_action: + - switch.turn_on: air_cond + supplemental_cooling_action: + - switch.turn_on: air_cond_2 + heat_action: + - switch.turn_on: heater + supplemental_heating_action: + - switch.turn_on: heater_2 + dry_action: + - switch.turn_on: air_cond + fan_only_action: + - switch.turn_on: fan_only + idle_action: + - switch.turn_off: air_cond + - switch.turn_off: air_cond_2 + - switch.turn_off: heater + - switch.turn_off: heater_2 + - switch.turn_off: fan_only + humidity_control_humidify_action: + - switch.turn_on: humidifier + humidity_control_off_action: + - switch.turn_off: humidifier + +sensor: + - platform: template + id: host_thermostat_humidity_sensor + unit_of_measurement: °C + accuracy_decimals: 2 + state_class: measurement + force_update: true + lambda: return 42.0; + update_interval: 0.1s + - platform: template + id: host_thermostat_temperature_sensor + unit_of_measurement: °C + accuracy_decimals: 2 + state_class: measurement + force_update: true + lambda: return 22.0; + update_interval: 0.1s + +switch: + - platform: template + id: air_cond + name: Air Conditioner + optimistic: true + - platform: template + id: air_cond_2 + name: Air Conditioner 2 + optimistic: true + - platform: template + id: fan_only + name: Fan + optimistic: true + - platform: template + id: heater + name: Heater + optimistic: true + - platform: template + id: heater_2 + name: Heater 2 + optimistic: true + - platform: template + id: humidifier + name: Humidifier + optimistic: true diff --git a/tests/integration/fixtures/host_mode_many_entities.yaml b/tests/integration/fixtures/host_mode_many_entities.yaml index 612186507c..acb03f235b 100644 --- 
a/tests/integration/fixtures/host_mode_many_entities.yaml +++ b/tests/integration/fixtures/host_mode_many_entities.yaml @@ -210,7 +210,15 @@ sensor: name: "Test Sensor 50" lambda: return 50.0; update_interval: 0.1s - # Temperature sensor for the thermostat + # Sensors for the thermostat + - platform: template + name: "Humidity Sensor" + id: humidity_sensor + lambda: return 35.0; + unit_of_measurement: "%" + device_class: humidity + state_class: measurement + update_interval: 5s - platform: template name: "Temperature Sensor" id: temp_sensor @@ -295,6 +303,11 @@ valve: - logger.log: "Valve stopping" output: + - platform: template + id: humidifier_output + type: binary + write_action: + - logger.log: "Humidifier output changed" - platform: template id: heater_output type: binary @@ -305,18 +318,31 @@ output: type: binary write_action: - logger.log: "Cooler output changed" + - platform: template + id: fan_output + type: binary + write_action: + - logger.log: "Fan output changed" climate: - platform: thermostat name: "Test Thermostat" sensor: temp_sensor + humidity_sensor: humidity_sensor default_preset: Home on_boot_restore_from: default_preset min_heating_off_time: 1s min_heating_run_time: 1s min_cooling_off_time: 1s min_cooling_run_time: 1s + min_fan_mode_switching_time: 1s min_idle_time: 1s + visual: + min_humidity: 20% + max_humidity: 70% + min_temperature: 15.0 + max_temperature: 32.0 + temperature_step: 0.1 heat_action: - output.turn_on: heater_output cool_action: @@ -324,6 +350,14 @@ climate: idle_action: - output.turn_off: heater_output - output.turn_off: cooler_output + humidity_control_humidify_action: + - output.turn_on: humidifier_output + humidity_control_off_action: + - output.turn_off: humidifier_output + fan_mode_auto_action: + - output.turn_off: fan_output + fan_mode_on_action: + - output.turn_on: fan_output preset: - name: Home default_target_temperature_low: 20 diff --git a/tests/integration/test_host_mode_climate_basic_state.py b/tests/integration/test_host_mode_climate_basic_state.py new file mode 100644 index 0000000000..4697342a99 --- /dev/null +++ b/tests/integration/test_host_mode_climate_basic_state.py @@ -0,0 +1,49 @@ +"""Integration test for Host mode with climate.""" + +from __future__ import annotations + +import asyncio + +import aioesphomeapi +from aioesphomeapi import ClimateAction, ClimateMode, ClimatePreset, EntityState +import pytest + +from .types import APIClientConnectedFactory, RunCompiledFunction + + +@pytest.mark.asyncio +async def test_host_mode_climate_basic_state( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test basic climate state reporting.""" + loop = asyncio.get_running_loop() + async with run_compiled(yaml_config), api_client_connected() as client: + states: dict[int, EntityState] = {} + climate_future: asyncio.Future[EntityState] = loop.create_future() + + def on_state(state: EntityState) -> None: + states[state.key] = state + if ( + isinstance(state, aioesphomeapi.ClimateState) + and not climate_future.done() + ): + climate_future.set_result(state) + + client.subscribe_states(on_state) + + try: + climate_state = await asyncio.wait_for(climate_future, timeout=5.0) + except TimeoutError: + pytest.fail("Climate state not received within 5 seconds") + + assert isinstance(climate_state, aioesphomeapi.ClimateState) + assert climate_state.mode == ClimateMode.OFF + assert climate_state.action == ClimateAction.OFF + assert climate_state.current_temperature == 22.0 + 
assert climate_state.target_temperature_low == 18.0 + assert climate_state.target_temperature_high == 24.0 + assert climate_state.preset == ClimatePreset.HOME + assert climate_state.current_humidity == 42.0 + assert climate_state.target_humidity == 20.0 diff --git a/tests/integration/test_host_mode_climate_control.py b/tests/integration/test_host_mode_climate_control.py new file mode 100644 index 0000000000..96d15dfae0 --- /dev/null +++ b/tests/integration/test_host_mode_climate_control.py @@ -0,0 +1,76 @@ +"""Integration test for Host mode with climate.""" + +from __future__ import annotations + +import asyncio + +import aioesphomeapi +from aioesphomeapi import ClimateInfo, ClimateMode, EntityState +import pytest + +from .state_utils import InitialStateHelper +from .types import APIClientConnectedFactory, RunCompiledFunction + + +@pytest.mark.asyncio +async def test_host_mode_climate_control( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test climate mode control.""" + loop = asyncio.get_running_loop() + async with run_compiled(yaml_config), api_client_connected() as client: + states: dict[int, EntityState] = {} + climate_future: asyncio.Future[EntityState] = loop.create_future() + + def on_state(state: EntityState) -> None: + states[state.key] = state + if ( + isinstance(state, aioesphomeapi.ClimateState) + and state.mode == ClimateMode.HEAT + and state.target_temperature_low == 21.5 + and state.target_temperature_high == 26.5 + and not climate_future.done() + ): + climate_future.set_result(state) + + # Get entities and set up state synchronization + entities, services = await client.list_entities_services() + initial_state_helper = InitialStateHelper(entities) + climate_infos = [e for e in entities if isinstance(e, ClimateInfo)] + assert len(climate_infos) >= 1, "Expected at least 1 climate entity" + + # Subscribe with the wrapper that filters initial states + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for all initial states to be broadcast + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial states") + + test_climate = next( + (c for c in climate_infos if c.name == "Dual-mode Thermostat"), None + ) + assert test_climate is not None, ( + "Dual-mode Thermostat thermostat climate not found" + ) + + # Adjust setpoints + client.climate_command( + test_climate.key, + mode=ClimateMode.HEAT, + target_temperature_low=21.5, + target_temperature_high=26.5, + ) + + try: + climate_state = await asyncio.wait_for(climate_future, timeout=5.0) + except TimeoutError: + pytest.fail("Climate state not received within 5 seconds") + + assert isinstance(climate_state, aioesphomeapi.ClimateState) + assert climate_state.mode == ClimateMode.HEAT + assert climate_state.target_temperature_low == 21.5 + assert climate_state.target_temperature_high == 26.5 diff --git a/tests/integration/test_host_mode_many_entities.py b/tests/integration/test_host_mode_many_entities.py index fbe3dc25c8..299644d496 100644 --- a/tests/integration/test_host_mode_many_entities.py +++ b/tests/integration/test_host_mode_many_entities.py @@ -5,7 +5,10 @@ from __future__ import annotations import asyncio from aioesphomeapi import ( + ClimateFanMode, + ClimateFeature, ClimateInfo, + ClimateMode, DateInfo, DateState, DateTimeInfo, @@ -121,6 +124,46 @@ async def test_host_mode_many_entities( assert len(climate_infos) >= 1, "Expected at least 1 climate 
entity" climate_info = climate_infos[0] + + # Verify feature flags set as expected + assert climate_info.feature_flags == ( + ClimateFeature.SUPPORTS_ACTION + | ClimateFeature.SUPPORTS_CURRENT_HUMIDITY + | ClimateFeature.SUPPORTS_CURRENT_TEMPERATURE + | ClimateFeature.SUPPORTS_TWO_POINT_TARGET_TEMPERATURE + | ClimateFeature.SUPPORTS_TARGET_HUMIDITY + ) + + # Verify modes + assert climate_info.supported_modes == [ + ClimateMode.OFF, + ClimateMode.COOL, + ClimateMode.HEAT, + ], f"Expected modes [OFF, COOL, HEAT], got {climate_info.supported_modes}" + + # Verify visual parameters + assert climate_info.visual_min_temperature == 15.0, ( + f"Expected min_temperature=15.0, got {climate_info.visual_min_temperature}" + ) + assert climate_info.visual_max_temperature == 32.0, ( + f"Expected max_temperature=32.0, got {climate_info.visual_max_temperature}" + ) + assert climate_info.visual_target_temperature_step == 0.1, ( + f"Expected temperature_step=0.1, got {climate_info.visual_target_temperature_step}" + ) + assert climate_info.visual_min_humidity == 20.0, ( + f"Expected min_humidity=20.0, got {climate_info.visual_min_humidity}" + ) + assert climate_info.visual_max_humidity == 70.0, ( + f"Expected max_humidity=70.0, got {climate_info.visual_max_humidity}" + ) + + # Verify fan modes + assert climate_info.supported_fan_modes == [ + ClimateFanMode.ON, + ClimateFanMode.AUTO, + ], f"Expected fan modes [ON, AUTO], got {climate_info.supported_fan_modes}" + # Verify the thermostat has presets assert len(climate_info.supported_presets) > 0, ( "Expected climate to have presets" From a5542e0d2bf7e10813bd43ca386fbd3c680ef358 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 21:38:05 -1000 Subject: [PATCH 243/336] [sensor] Optimize calibration and Or filters with FixedVector (#11437) --- esphome/components/sensor/filter.cpp | 16 +++++++++++----- esphome/components/sensor/filter.h | 13 ++++++------- 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/esphome/components/sensor/filter.cpp b/esphome/components/sensor/filter.cpp index 0d57c792db..e8d04d161b 100644 --- a/esphome/components/sensor/filter.cpp +++ b/esphome/components/sensor/filter.cpp @@ -313,7 +313,7 @@ optional DeltaFilter::new_value(float value) { } // OrFilter -OrFilter::OrFilter(std::vector filters) : filters_(std::move(filters)), phi_(this) {} +OrFilter::OrFilter(std::initializer_list filters) : filters_(filters), phi_(this) {} OrFilter::PhiNode::PhiNode(OrFilter *or_parent) : or_parent_(or_parent) {} optional OrFilter::PhiNode::new_value(float value) { @@ -326,14 +326,14 @@ optional OrFilter::PhiNode::new_value(float value) { } optional OrFilter::new_value(float value) { this->has_value_ = false; - for (Filter *filter : this->filters_) + for (auto *filter : this->filters_) filter->input(value); return {}; } void OrFilter::initialize(Sensor *parent, Filter *next) { Filter::initialize(parent, next); - for (Filter *filter : this->filters_) { + for (auto *filter : this->filters_) { filter->initialize(parent, &this->phi_); } this->phi_.initialize(parent, nullptr); @@ -386,18 +386,24 @@ void HeartbeatFilter::setup() { } float HeartbeatFilter::get_setup_priority() const { return setup_priority::HARDWARE; } +CalibrateLinearFilter::CalibrateLinearFilter(std::initializer_list> linear_functions) + : linear_functions_(linear_functions) {} + optional CalibrateLinearFilter::new_value(float value) { - for (std::array f : this->linear_functions_) { + for (const auto &f : this->linear_functions_) { if (!std::isfinite(f[2]) || 
value < f[2]) return (value * f[0]) + f[1]; } return NAN; } +CalibratePolynomialFilter::CalibratePolynomialFilter(std::initializer_list coefficients) + : coefficients_(coefficients) {} + optional CalibratePolynomialFilter::new_value(float value) { float res = 0.0f; float x = 1.0f; - for (float coefficient : this->coefficients_) { + for (const auto &coefficient : this->coefficients_) { res += x * coefficient; x *= value; } diff --git a/esphome/components/sensor/filter.h b/esphome/components/sensor/filter.h index e09c66afcb..03a1e0f24c 100644 --- a/esphome/components/sensor/filter.h +++ b/esphome/components/sensor/filter.h @@ -422,7 +422,7 @@ class DeltaFilter : public Filter { class OrFilter : public Filter { public: - explicit OrFilter(std::vector filters); + explicit OrFilter(std::initializer_list filters); void initialize(Sensor *parent, Filter *next) override; @@ -438,28 +438,27 @@ class OrFilter : public Filter { OrFilter *or_parent_; }; - std::vector filters_; + FixedVector filters_; PhiNode phi_; bool has_value_{false}; }; class CalibrateLinearFilter : public Filter { public: - CalibrateLinearFilter(std::vector> linear_functions) - : linear_functions_(std::move(linear_functions)) {} + explicit CalibrateLinearFilter(std::initializer_list> linear_functions); optional new_value(float value) override; protected: - std::vector> linear_functions_; + FixedVector> linear_functions_; }; class CalibratePolynomialFilter : public Filter { public: - CalibratePolynomialFilter(std::vector coefficients) : coefficients_(std::move(coefficients)) {} + explicit CalibratePolynomialFilter(std::initializer_list coefficients); optional new_value(float value) override; protected: - std::vector coefficients_; + FixedVector coefficients_; }; class ClampFilter : public Filter { From 4bb4a309e7aa5aeee0ed71eaf900774141324538 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 22:09:22 -1000 Subject: [PATCH 244/336] [binary_sensor] Optimize AutorepeatFilter with FixedVector --- esphome/components/binary_sensor/__init__.py | 35 +++++++++++++------- esphome/components/binary_sensor/filter.cpp | 3 +- esphome/components/binary_sensor/filter.h | 11 ++---- tests/components/binary_sensor/common.yaml | 33 ++++++++++++++++++ 4 files changed, 59 insertions(+), 23 deletions(-) diff --git a/esphome/components/binary_sensor/__init__.py b/esphome/components/binary_sensor/__init__.py index 6aa97d6e05..26e784a0b8 100644 --- a/esphome/components/binary_sensor/__init__.py +++ b/esphome/components/binary_sensor/__init__.py @@ -264,20 +264,31 @@ async def delayed_off_filter_to_code(config, filter_id): ), ) async def autorepeat_filter_to_code(config, filter_id): - timings = [] if len(config) > 0: - timings.extend( - (conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON]) - for conf in config - ) - else: - timings.append( - ( - cv.time_period_str_unit(DEFAULT_DELAY).total_milliseconds, - cv.time_period_str_unit(DEFAULT_TIME_OFF).total_milliseconds, - cv.time_period_str_unit(DEFAULT_TIME_ON).total_milliseconds, + timings = [ + cg.StructInitializer( + cg.MockObj("AutorepeatFilterTiming", "esphome::binary_sensor::"), + ("delay", conf[CONF_DELAY]), + ("time_off", conf[CONF_TIME_OFF]), + ("time_on", conf[CONF_TIME_ON]), ) - ) + for conf in config + ] + else: + timings = [ + cg.StructInitializer( + cg.MockObj("AutorepeatFilterTiming", "esphome::binary_sensor::"), + ("delay", cv.time_period_str_unit(DEFAULT_DELAY).total_milliseconds), + ( + "time_off", + cv.time_period_str_unit(DEFAULT_TIME_OFF).total_milliseconds, + ), + ( + "time_on", + cv.time_period_str_unit(DEFAULT_TIME_ON).total_milliseconds, + ), + ) + ] var = cg.new_Pvariable(filter_id, timings) await cg.register_component(var, {}) return var diff --git a/esphome/components/binary_sensor/filter.cpp b/esphome/components/binary_sensor/filter.cpp index 3567e9c72b..8f31cf6fc2 100644 --- a/esphome/components/binary_sensor/filter.cpp +++ b/esphome/components/binary_sensor/filter.cpp @@ -1,7 +1,6 @@ #include "filter.h" #include "binary_sensor.h" -#include namespace esphome { @@ -68,7 +67,7 @@ float DelayedOffFilter::get_setup_priority() const { return setup_priority::HARD optional InvertFilter::new_value(bool value) { return !value; } -AutorepeatFilter::AutorepeatFilter(std::vector timings) : timings_(std::move(timings)) {} +AutorepeatFilter::AutorepeatFilter(std::initializer_list timings) : timings_(timings) {} optional AutorepeatFilter::new_value(bool value) { if (value) { diff --git a/esphome/components/binary_sensor/filter.h b/esphome/components/binary_sensor/filter.h index 16f44aa5fe..a7eb080feb 100644 --- a/esphome/components/binary_sensor/filter.h +++ b/esphome/components/binary_sensor/filter.h @@ -4,8 +4,6 @@ #include "esphome/core/component.h" #include "esphome/core/helpers.h" -#include - namespace esphome { namespace binary_sensor { @@ -82,11 +80,6 @@ class InvertFilter : public Filter { }; struct AutorepeatFilterTiming { - AutorepeatFilterTiming(uint32_t delay, uint32_t off, uint32_t on) { - this->delay = delay; - this->time_off = off; - this->time_on = on; - } uint32_t delay; uint32_t time_off; uint32_t time_on; @@ -94,7 +87,7 @@ struct AutorepeatFilterTiming { class AutorepeatFilter : public Filter, public Component { public: - explicit AutorepeatFilter(std::vector timings); + explicit AutorepeatFilter(std::initializer_list timings); optional new_value(bool value) override; @@ -104,7 
+97,7 @@ class AutorepeatFilter : public Filter, public Component { void next_timing_(); void next_value_(bool val); - std::vector timings_; + FixedVector timings_; uint8_t active_timing_{0}; }; diff --git a/tests/components/binary_sensor/common.yaml b/tests/components/binary_sensor/common.yaml index ed6322768f..6965c1feeb 100644 --- a/tests/components/binary_sensor/common.yaml +++ b/tests/components/binary_sensor/common.yaml @@ -37,3 +37,36 @@ binary_sensor: format: "New state is %s" args: ['x.has_value() ? ONOFF(x) : "Unknown"'] - binary_sensor.invalidate_state: some_binary_sensor + + # Test autorepeat with default configuration (no timings) + - platform: template + id: autorepeat_default + name: "Autorepeat Default" + filters: + - autorepeat: + + # Test autorepeat with single timing entry + - platform: template + id: autorepeat_single + name: "Autorepeat Single" + filters: + - autorepeat: + - delay: 2s + time_off: 200ms + time_on: 800ms + + # Test autorepeat with three timing entries + - platform: template + id: autorepeat_multiple + name: "Autorepeat Multiple" + filters: + - autorepeat: + - delay: 500ms + time_off: 50ms + time_on: 950ms + - delay: 2s + time_off: 100ms + time_on: 900ms + - delay: 10s + time_off: 200ms + time_on: 800ms From 51678fe4a43e09739a4fa2fa0d8e8bfd9f87b704 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 22:36:10 -1000 Subject: [PATCH 245/336] [climate] Remove unnecessary vector allocations in state save/restore --- esphome/components/climate/climate.cpp | 44 ++++++++++++++++++-------- 1 file changed, 30 insertions(+), 14 deletions(-) diff --git a/esphome/components/climate/climate.cpp b/esphome/components/climate/climate.cpp index 24a3fe6d5a..87d03f78c5 100644 --- a/esphome/components/climate/climate.cpp +++ b/esphome/components/climate/climate.cpp @@ -385,12 +385,14 @@ void Climate::save_state_() { if (!traits.get_supported_custom_fan_modes().empty() && custom_fan_mode.has_value()) { state.uses_custom_fan_mode = true; const auto &supported = traits.get_supported_custom_fan_modes(); - std::vector vec{supported.begin(), supported.end()}; - for (size_t i = 0; i < vec.size(); i++) { - if (vec[i] == custom_fan_mode) { + // std::set has consistent order (lexicographic for strings) + size_t i = 0; + for (const auto &mode : supported) { + if (mode == custom_fan_mode) { state.custom_fan_mode = i; break; } + i++; } } if (traits.get_supports_presets() && preset.has_value()) { @@ -400,12 +402,14 @@ void Climate::save_state_() { if (!traits.get_supported_custom_presets().empty() && custom_preset.has_value()) { state.uses_custom_preset = true; const auto &supported = traits.get_supported_custom_presets(); - std::vector vec{supported.begin(), supported.end()}; - for (size_t i = 0; i < vec.size(); i++) { - if (vec[i] == custom_preset) { + // std::set has consistent order (lexicographic for strings) + size_t i = 0; + for (const auto &preset : supported) { + if (preset == custom_preset) { state.custom_preset = i; break; } + i++; } } if (traits.get_supports_swing_modes()) { @@ -549,22 +553,34 @@ void ClimateDeviceRestoreState::apply(Climate *climate) { climate->fan_mode = this->fan_mode; } if (!traits.get_supported_custom_fan_modes().empty() && this->uses_custom_fan_mode) { - // std::set has consistent order (lexicographic for strings), so this is ok + // std::set has consistent order (lexicographic for strings) const auto &modes = traits.get_supported_custom_fan_modes(); - std::vector modes_vec{modes.begin(), modes.end()}; - if (custom_fan_mode < 
modes_vec.size()) { - climate->custom_fan_mode = modes_vec[this->custom_fan_mode]; + if (custom_fan_mode < modes.size()) { + size_t i = 0; + for (const auto &mode : modes) { + if (i == this->custom_fan_mode) { + climate->custom_fan_mode = mode; + break; + } + i++; + } } } if (traits.get_supports_presets() && !this->uses_custom_preset) { climate->preset = this->preset; } if (!traits.get_supported_custom_presets().empty() && uses_custom_preset) { - // std::set has consistent order (lexicographic for strings), so this is ok + // std::set has consistent order (lexicographic for strings) const auto &presets = traits.get_supported_custom_presets(); - std::vector presets_vec{presets.begin(), presets.end()}; - if (custom_preset < presets_vec.size()) { - climate->custom_preset = presets_vec[this->custom_preset]; + if (custom_preset < presets.size()) { + size_t i = 0; + for (const auto &preset : presets) { + if (i == this->custom_preset) { + climate->custom_preset = preset; + break; + } + i++; + } } } if (traits.get_supports_swing_modes()) { From 7f2cc47ed6c4fa7ef531da3d80066a4829e37430 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 23:25:59 -1000 Subject: [PATCH 246/336] [binary_sensor] Add compile test for auto repeat (#11443) --- tests/components/binary_sensor/common.yaml | 33 ++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/tests/components/binary_sensor/common.yaml b/tests/components/binary_sensor/common.yaml index ed6322768f..6965c1feeb 100644 --- a/tests/components/binary_sensor/common.yaml +++ b/tests/components/binary_sensor/common.yaml @@ -37,3 +37,36 @@ binary_sensor: format: "New state is %s" args: ['x.has_value() ? ONOFF(x) : "Unknown"'] - binary_sensor.invalidate_state: some_binary_sensor + + # Test autorepeat with default configuration (no timings) + - platform: template + id: autorepeat_default + name: "Autorepeat Default" + filters: + - autorepeat: + + # Test autorepeat with single timing entry + - platform: template + id: autorepeat_single + name: "Autorepeat Single" + filters: + - autorepeat: + - delay: 2s + time_off: 200ms + time_on: 800ms + + # Test autorepeat with three timing entries + - platform: template + id: autorepeat_multiple + name: "Autorepeat Multiple" + filters: + - autorepeat: + - delay: 500ms + time_off: 50ms + time_on: 950ms + - delay: 2s + time_off: 100ms + time_on: 900ms + - delay: 10s + time_off: 200ms + time_on: 800ms From f9f0d895f7842f273430b67b694e403e56400277 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 23:28:23 -1000 Subject: [PATCH 247/336] [gpio] Optimize switch interlock with FixedVector --- esphome/components/gpio/switch/gpio_switch.cpp | 2 +- esphome/components/gpio/switch/gpio_switch.h | 7 +++---- tests/components/gpio/common.yaml | 17 +++++++++++++++++ tests/components/gpio/test.esp8266-ard.yaml | 3 +++ 4 files changed, 24 insertions(+), 5 deletions(-) diff --git a/esphome/components/gpio/switch/gpio_switch.cpp b/esphome/components/gpio/switch/gpio_switch.cpp index b67af5e95d..9043a6a493 100644 --- a/esphome/components/gpio/switch/gpio_switch.cpp +++ b/esphome/components/gpio/switch/gpio_switch.cpp @@ -67,7 +67,7 @@ void GPIOSwitch::write_state(bool state) { this->pin_->digital_write(state); this->publish_state(state); } -void GPIOSwitch::set_interlock(const std::vector &interlock) { this->interlock_ = interlock; } +void GPIOSwitch::set_interlock(const std::initializer_list &interlock) { this->interlock_ = interlock; } } // namespace gpio } // namespace esphome diff --git a/esphome/components/gpio/switch/gpio_switch.h b/esphome/components/gpio/switch/gpio_switch.h index 94d49745b5..080decac08 100644 --- a/esphome/components/gpio/switch/gpio_switch.h +++ b/esphome/components/gpio/switch/gpio_switch.h @@ -2,10 +2,9 @@ #include "esphome/core/component.h" #include "esphome/core/hal.h" +#include "esphome/core/helpers.h" #include "esphome/components/switch/switch.h" -#include - namespace esphome { namespace gpio { @@ -19,14 +18,14 @@ class GPIOSwitch : public switch_::Switch, public Component { void setup() override; void dump_config() override; - void set_interlock(const std::vector &interlock); + void set_interlock(const std::initializer_list &interlock); void set_interlock_wait_time(uint32_t interlock_wait_time) { interlock_wait_time_ = interlock_wait_time; } protected: void write_state(bool state) override; GPIOPin *pin_; - std::vector interlock_; + FixedVector interlock_; uint32_t interlock_wait_time_{0}; }; diff --git a/tests/components/gpio/common.yaml b/tests/components/gpio/common.yaml index 4e237349d9..b8e8fa81e4 100644 --- a/tests/components/gpio/common.yaml +++ b/tests/components/gpio/common.yaml @@ -12,3 +12,20 @@ switch: - platform: gpio pin: ${switch_pin} id: gpio_switch + + - platform: gpio + pin: ${switch_pin_2} + id: gpio_switch_interlock_1 + interlock: [gpio_switch_interlock_2, gpio_switch_interlock_3] + interlock_wait_time: 100ms + + - platform: gpio + pin: ${switch_pin_3} + id: gpio_switch_interlock_2 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_3] + + - platform: gpio + pin: ${switch_pin_4} + id: gpio_switch_interlock_3 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_2] + interlock_wait_time: 50ms diff --git a/tests/components/gpio/test.esp8266-ard.yaml b/tests/components/gpio/test.esp8266-ard.yaml index e1660ec47c..e13b4520d1 100644 --- a/tests/components/gpio/test.esp8266-ard.yaml +++ b/tests/components/gpio/test.esp8266-ard.yaml @@ -2,5 +2,8 @@ substitutions: binary_sensor_pin: GPIO0 output_pin: GPIO2 switch_pin: GPIO15 + switch_pin_2: GPIO12 + switch_pin_3: GPIO13 + switch_pin_4: GPIO14 <<: !include common.yaml From 245f083a5c78f12e258624d3cbe033d2232c78f8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 20 Oct 2025 23:29:15 -1000 Subject: [PATCH 248/336] Add gpio switch interlock compile tests --- tests/components/gpio/common.yaml | 17 +++++++++++++++++ tests/components/gpio/test.esp8266-ard.yaml | 3 +++ 2 files changed, 20 insertions(+) diff --git a/tests/components/gpio/common.yaml b/tests/components/gpio/common.yaml index 4e237349d9..b8e8fa81e4 100644 --- a/tests/components/gpio/common.yaml +++ b/tests/components/gpio/common.yaml @@ -12,3 +12,20 @@ switch: - platform: gpio pin: ${switch_pin} id: gpio_switch + + - platform: gpio + pin: ${switch_pin_2} + id: gpio_switch_interlock_1 + interlock: [gpio_switch_interlock_2, gpio_switch_interlock_3] + interlock_wait_time: 100ms + + - platform: gpio + pin: ${switch_pin_3} + id: gpio_switch_interlock_2 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_3] + + - platform: gpio + pin: ${switch_pin_4} + id: gpio_switch_interlock_3 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_2] + interlock_wait_time: 50ms diff --git a/tests/components/gpio/test.esp8266-ard.yaml b/tests/components/gpio/test.esp8266-ard.yaml index e1660ec47c..e13b4520d1 100644 --- a/tests/components/gpio/test.esp8266-ard.yaml +++ b/tests/components/gpio/test.esp8266-ard.yaml @@ -2,5 +2,8 @@ substitutions: binary_sensor_pin: GPIO0 output_pin: GPIO2 switch_pin: GPIO15 + switch_pin_2: GPIO12 + switch_pin_3: GPIO13 + switch_pin_4: GPIO14 <<: !include common.yaml From 3aedfe8be34d7fd7758d96fdc6e6ee95065fc705 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 23:30:13 -1000 Subject: [PATCH 249/336] [binary_sensor] Optimize AutorepeatFilter with FixedVector (#11444) --- esphome/components/binary_sensor/__init__.py | 35 +++++++++++++------- esphome/components/binary_sensor/filter.cpp | 3 +- esphome/components/binary_sensor/filter.h | 11 ++---- 3 files changed, 26 insertions(+), 23 deletions(-) diff --git a/esphome/components/binary_sensor/__init__.py b/esphome/components/binary_sensor/__init__.py index 6aa97d6e05..26e784a0b8 100644 --- a/esphome/components/binary_sensor/__init__.py +++ b/esphome/components/binary_sensor/__init__.py @@ -264,20 +264,31 @@ async def delayed_off_filter_to_code(config, filter_id): ), ) async def autorepeat_filter_to_code(config, filter_id): - timings = [] if len(config) > 0: - timings.extend( - (conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON]) - for conf in config - ) - else: - timings.append( - ( - cv.time_period_str_unit(DEFAULT_DELAY).total_milliseconds, - cv.time_period_str_unit(DEFAULT_TIME_OFF).total_milliseconds, - cv.time_period_str_unit(DEFAULT_TIME_ON).total_milliseconds, + timings = [ + cg.StructInitializer( + cg.MockObj("AutorepeatFilterTiming", "esphome::binary_sensor::"), + ("delay", conf[CONF_DELAY]), + ("time_off", conf[CONF_TIME_OFF]), + ("time_on", conf[CONF_TIME_ON]), ) - ) + for conf in config + ] + else: + timings = [ + cg.StructInitializer( + cg.MockObj("AutorepeatFilterTiming", "esphome::binary_sensor::"), + ("delay", cv.time_period_str_unit(DEFAULT_DELAY).total_milliseconds), + ( + "time_off", + cv.time_period_str_unit(DEFAULT_TIME_OFF).total_milliseconds, + ), + ( + "time_on", + cv.time_period_str_unit(DEFAULT_TIME_ON).total_milliseconds, + ), + ) + ] var = cg.new_Pvariable(filter_id, timings) await cg.register_component(var, {}) return var diff --git a/esphome/components/binary_sensor/filter.cpp b/esphome/components/binary_sensor/filter.cpp index 3567e9c72b..8f31cf6fc2 100644 --- a/esphome/components/binary_sensor/filter.cpp +++ 
b/esphome/components/binary_sensor/filter.cpp @@ -1,7 +1,6 @@ #include "filter.h" #include "binary_sensor.h" -#include namespace esphome { @@ -68,7 +67,7 @@ float DelayedOffFilter::get_setup_priority() const { return setup_priority::HARD optional InvertFilter::new_value(bool value) { return !value; } -AutorepeatFilter::AutorepeatFilter(std::vector timings) : timings_(std::move(timings)) {} +AutorepeatFilter::AutorepeatFilter(std::initializer_list timings) : timings_(timings) {} optional AutorepeatFilter::new_value(bool value) { if (value) { diff --git a/esphome/components/binary_sensor/filter.h b/esphome/components/binary_sensor/filter.h index 16f44aa5fe..a7eb080feb 100644 --- a/esphome/components/binary_sensor/filter.h +++ b/esphome/components/binary_sensor/filter.h @@ -4,8 +4,6 @@ #include "esphome/core/component.h" #include "esphome/core/helpers.h" -#include - namespace esphome { namespace binary_sensor { @@ -82,11 +80,6 @@ class InvertFilter : public Filter { }; struct AutorepeatFilterTiming { - AutorepeatFilterTiming(uint32_t delay, uint32_t off, uint32_t on) { - this->delay = delay; - this->time_off = off; - this->time_on = on; - } uint32_t delay; uint32_t time_off; uint32_t time_on; @@ -94,7 +87,7 @@ struct AutorepeatFilterTiming { class AutorepeatFilter : public Filter, public Component { public: - explicit AutorepeatFilter(std::vector timings); + explicit AutorepeatFilter(std::initializer_list timings); optional new_value(bool value) override; @@ -104,7 +97,7 @@ class AutorepeatFilter : public Filter, public Component { void next_timing_(); void next_value_(bool val); - std::vector timings_; + FixedVector timings_; uint8_t active_timing_{0}; }; From 53d0f589bab1dcead2f35e47d9a5868ef639e5f2 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 23:34:16 -1000 Subject: [PATCH 250/336] Add gpio switch interlock compile tests --- tests/components/gpio/test.esp32-c3-idf.yaml | 3 +++ tests/components/gpio/test.esp32-idf.yaml | 3 +++ tests/components/gpio/test.nrf52-adafruit.yaml | 17 +++++++++++++++++ tests/components/gpio/test.nrf52-mcumgr.yaml | 17 +++++++++++++++++ tests/components/gpio/test.rp2040-ard.yaml | 3 +++ 5 files changed, 43 insertions(+) diff --git a/tests/components/gpio/test.esp32-c3-idf.yaml b/tests/components/gpio/test.esp32-c3-idf.yaml index fc7c9942d0..e9071b4356 100644 --- a/tests/components/gpio/test.esp32-c3-idf.yaml +++ b/tests/components/gpio/test.esp32-c3-idf.yaml @@ -2,5 +2,8 @@ substitutions: binary_sensor_pin: GPIO2 output_pin: GPIO3 switch_pin: GPIO4 + switch_pin_2: GPIO5 + switch_pin_3: GPIO6 + switch_pin_4: GPIO7 <<: !include common.yaml diff --git a/tests/components/gpio/test.esp32-idf.yaml b/tests/components/gpio/test.esp32-idf.yaml index 09f41abb79..862aa533ea 100644 --- a/tests/components/gpio/test.esp32-idf.yaml +++ b/tests/components/gpio/test.esp32-idf.yaml @@ -2,5 +2,8 @@ substitutions: binary_sensor_pin: GPIO12 output_pin: GPIO13 switch_pin: GPIO14 + switch_pin_2: GPIO15 + switch_pin_3: GPIO16 + switch_pin_4: GPIO17 <<: !include common.yaml diff --git a/tests/components/gpio/test.nrf52-adafruit.yaml b/tests/components/gpio/test.nrf52-adafruit.yaml index 912b9537c4..fb3f368e03 100644 --- a/tests/components/gpio/test.nrf52-adafruit.yaml +++ b/tests/components/gpio/test.nrf52-adafruit.yaml @@ -12,3 +12,20 @@ switch: - platform: gpio pin: P1.2 id: gpio_switch + + - platform: gpio + pin: P1.3 + id: gpio_switch_interlock_1 + interlock: [gpio_switch_interlock_2, gpio_switch_interlock_3] + interlock_wait_time: 100ms + + - platform: gpio + 
pin: P1.4 + id: gpio_switch_interlock_2 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_3] + + - platform: gpio + pin: P1.5 + id: gpio_switch_interlock_3 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_2] + interlock_wait_time: 50ms diff --git a/tests/components/gpio/test.nrf52-mcumgr.yaml b/tests/components/gpio/test.nrf52-mcumgr.yaml index 912b9537c4..fb3f368e03 100644 --- a/tests/components/gpio/test.nrf52-mcumgr.yaml +++ b/tests/components/gpio/test.nrf52-mcumgr.yaml @@ -12,3 +12,20 @@ switch: - platform: gpio pin: P1.2 id: gpio_switch + + - platform: gpio + pin: P1.3 + id: gpio_switch_interlock_1 + interlock: [gpio_switch_interlock_2, gpio_switch_interlock_3] + interlock_wait_time: 100ms + + - platform: gpio + pin: P1.4 + id: gpio_switch_interlock_2 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_3] + + - platform: gpio + pin: P1.5 + id: gpio_switch_interlock_3 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_2] + interlock_wait_time: 50ms diff --git a/tests/components/gpio/test.rp2040-ard.yaml b/tests/components/gpio/test.rp2040-ard.yaml index fc7c9942d0..e9071b4356 100644 --- a/tests/components/gpio/test.rp2040-ard.yaml +++ b/tests/components/gpio/test.rp2040-ard.yaml @@ -2,5 +2,8 @@ substitutions: binary_sensor_pin: GPIO2 output_pin: GPIO3 switch_pin: GPIO4 + switch_pin_2: GPIO5 + switch_pin_3: GPIO6 + switch_pin_4: GPIO7 <<: !include common.yaml From 87e9a7a1bd8923b2ab0c36b0f8fb1cbc6ae01308 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 20 Oct 2025 23:35:18 -1000 Subject: [PATCH 251/336] [climate] Remove unnecessary vector allocations in state save/restore (#11445) --- esphome/components/climate/climate.cpp | 44 ++++++++++++++++++-------- 1 file changed, 30 insertions(+), 14 deletions(-) diff --git a/esphome/components/climate/climate.cpp b/esphome/components/climate/climate.cpp index 24a3fe6d5a..87d03f78c5 100644 --- a/esphome/components/climate/climate.cpp +++ b/esphome/components/climate/climate.cpp @@ -385,12 +385,14 @@ void Climate::save_state_() { if (!traits.get_supported_custom_fan_modes().empty() && custom_fan_mode.has_value()) { state.uses_custom_fan_mode = true; const auto &supported = traits.get_supported_custom_fan_modes(); - std::vector vec{supported.begin(), supported.end()}; - for (size_t i = 0; i < vec.size(); i++) { - if (vec[i] == custom_fan_mode) { + // std::set has consistent order (lexicographic for strings) + size_t i = 0; + for (const auto &mode : supported) { + if (mode == custom_fan_mode) { state.custom_fan_mode = i; break; } + i++; } } if (traits.get_supports_presets() && preset.has_value()) { @@ -400,12 +402,14 @@ void Climate::save_state_() { if (!traits.get_supported_custom_presets().empty() && custom_preset.has_value()) { state.uses_custom_preset = true; const auto &supported = traits.get_supported_custom_presets(); - std::vector vec{supported.begin(), supported.end()}; - for (size_t i = 0; i < vec.size(); i++) { - if (vec[i] == custom_preset) { + // std::set has consistent order (lexicographic for strings) + size_t i = 0; + for (const auto &preset : supported) { + if (preset == custom_preset) { state.custom_preset = i; break; } + i++; } } if (traits.get_supports_swing_modes()) { @@ -549,22 +553,34 @@ void ClimateDeviceRestoreState::apply(Climate *climate) { climate->fan_mode = this->fan_mode; } if (!traits.get_supported_custom_fan_modes().empty() && this->uses_custom_fan_mode) { - // std::set has consistent order (lexicographic for strings), so this is ok + // std::set has consistent 
order (lexicographic for strings) const auto &modes = traits.get_supported_custom_fan_modes(); - std::vector modes_vec{modes.begin(), modes.end()}; - if (custom_fan_mode < modes_vec.size()) { - climate->custom_fan_mode = modes_vec[this->custom_fan_mode]; + if (custom_fan_mode < modes.size()) { + size_t i = 0; + for (const auto &mode : modes) { + if (i == this->custom_fan_mode) { + climate->custom_fan_mode = mode; + break; + } + i++; + } } } if (traits.get_supports_presets() && !this->uses_custom_preset) { climate->preset = this->preset; } if (!traits.get_supported_custom_presets().empty() && uses_custom_preset) { - // std::set has consistent order (lexicographic for strings), so this is ok + // std::set has consistent order (lexicographic for strings) const auto &presets = traits.get_supported_custom_presets(); - std::vector presets_vec{presets.begin(), presets.end()}; - if (custom_preset < presets_vec.size()) { - climate->custom_preset = presets_vec[this->custom_preset]; + if (custom_preset < presets.size()) { + size_t i = 0; + for (const auto &preset : presets) { + if (i == this->custom_preset) { + climate->custom_preset = preset; + break; + } + i++; + } } } if (traits.get_supports_swing_modes()) { From 80265a6bd2ad9208b3fa0b0ee8137495b4aeb7f0 Mon Sep 17 00:00:00 2001 From: Petr Kejval Date: Tue, 21 Oct 2025 15:17:07 +0200 Subject: [PATCH 252/336] [sensor] Add optimistic option to heartbeat filter (#10993) Co-authored-by: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com> Co-authored-by: J. Nick Koston --- esphome/components/sensor/__init__.py | 23 +++++++++++++++++++++- esphome/components/sensor/filter.cpp | 5 +++++ esphome/components/sensor/filter.h | 5 +++-- tests/components/template/common-base.yaml | 3 +++ 4 files changed, 33 insertions(+), 3 deletions(-) diff --git a/esphome/components/sensor/__init__.py b/esphome/components/sensor/__init__.py index e603896f6d..7e91bb83c4 100644 --- a/esphome/components/sensor/__init__.py +++ b/esphome/components/sensor/__init__.py @@ -28,6 +28,8 @@ from esphome.const import ( CONF_ON_RAW_VALUE, CONF_ON_VALUE, CONF_ON_VALUE_RANGE, + CONF_OPTIMISTIC, + CONF_PERIOD, CONF_QUANTILE, CONF_SEND_EVERY, CONF_SEND_FIRST_AT, @@ -644,10 +646,29 @@ async def throttle_with_priority_filter_to_code(config, filter_id): return cg.new_Pvariable(filter_id, config[CONF_TIMEOUT], template_) +HEARTBEAT_SCHEMA = cv.Schema( + { + cv.Required(CONF_PERIOD): cv.positive_time_period_milliseconds, + cv.Optional(CONF_OPTIMISTIC, default=False): cv.boolean, + } +) + + @FILTER_REGISTRY.register( - "heartbeat", HeartbeatFilter, cv.positive_time_period_milliseconds + "heartbeat", + HeartbeatFilter, + cv.Any( + cv.positive_time_period_milliseconds, + HEARTBEAT_SCHEMA, + ), ) async def heartbeat_filter_to_code(config, filter_id): + if isinstance(config, dict): + var = cg.new_Pvariable(filter_id, config[CONF_PERIOD]) + await cg.register_component(var, {}) + cg.add(var.set_optimistic(config[CONF_OPTIMISTIC])) + return var + var = cg.new_Pvariable(filter_id, config) await cg.register_component(var, {}) return var diff --git a/esphome/components/sensor/filter.cpp b/esphome/components/sensor/filter.cpp index e8d04d161b..65d8dea31c 100644 --- a/esphome/components/sensor/filter.cpp +++ b/esphome/components/sensor/filter.cpp @@ -372,8 +372,12 @@ optional HeartbeatFilter::new_value(float value) { this->last_input_ = value; this->has_value_ = true; + if (this->optimistic_) { + return value; + } return {}; } + void HeartbeatFilter::setup() { this->set_interval("heartbeat", 
this->time_period_, [this]() { ESP_LOGVV(TAG, "HeartbeatFilter(%p)::interval(has_value=%s, last_input=%f)", this, YESNO(this->has_value_), @@ -384,6 +388,7 @@ void HeartbeatFilter::setup() { this->output(this->last_input_); }); } + float HeartbeatFilter::get_setup_priority() const { return setup_priority::HARDWARE; } CalibrateLinearFilter::CalibrateLinearFilter(std::initializer_list> linear_functions) diff --git a/esphome/components/sensor/filter.h b/esphome/components/sensor/filter.h index 03a1e0f24c..ecd55308d1 100644 --- a/esphome/components/sensor/filter.h +++ b/esphome/components/sensor/filter.h @@ -396,15 +396,16 @@ class HeartbeatFilter : public Filter, public Component { explicit HeartbeatFilter(uint32_t time_period); void setup() override; - optional new_value(float value) override; - float get_setup_priority() const override; + void set_optimistic(bool optimistic) { this->optimistic_ = optimistic; } + protected: uint32_t time_period_; float last_input_; bool has_value_{false}; + bool optimistic_{false}; }; class DeltaFilter : public Filter { diff --git a/tests/components/template/common-base.yaml b/tests/components/template/common-base.yaml index ea812532d4..b873af5207 100644 --- a/tests/components/template/common-base.yaml +++ b/tests/components/template/common-base.yaml @@ -101,6 +101,9 @@ sensor: - filter_out: 10 - filter_out: !lambda return NAN; - heartbeat: 5s + - heartbeat: + period: 5s + optimistic: true - lambda: return x * (9.0/5.0) + 32.0; - max: window_size: 10 From 8e8a2bde95e306fd029e3c40abc2547c11a34a5b Mon Sep 17 00:00:00 2001 From: Jeff Brown Date: Tue, 21 Oct 2025 13:37:29 -0700 Subject: [PATCH 253/336] [light] Decouple AddressableLight and Light transition classes (#11166) Co-authored-by: J. Nick Koston --- esphome/components/light/addressable_light.cpp | 2 +- esphome/components/light/addressable_light.h | 4 ++-- esphome/components/light/light_transformer.h | 4 ++++ esphome/components/light/transformers.h | 6 +----- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/esphome/components/light/addressable_light.cpp b/esphome/components/light/addressable_light.cpp index a8e0c7b762..cd83015ecb 100644 --- a/esphome/components/light/addressable_light.cpp +++ b/esphome/components/light/addressable_light.cpp @@ -62,7 +62,7 @@ void AddressableLightTransformer::start() { } optional AddressableLightTransformer::apply() { - float smoothed_progress = LightTransitionTransformer::smoothed_progress(this->get_progress_()); + float smoothed_progress = LightTransformer::smoothed_progress(this->get_progress_()); // When running an output-buffer modifying effect, don't try to transition individual LEDs, but instead just fade the // LightColorValues. 
write_state() then picks up the change in brightness, and the color change is picked up by the diff --git a/esphome/components/light/addressable_light.h b/esphome/components/light/addressable_light.h index 3e94a39745..c8ed4897fa 100644 --- a/esphome/components/light/addressable_light.h +++ b/esphome/components/light/addressable_light.h @@ -8,7 +8,7 @@ #include "esphome/core/defines.h" #include "light_output.h" #include "light_state.h" -#include "transformers.h" +#include "light_transformer.h" #ifdef USE_POWER_SUPPLY #include "esphome/components/power_supply/power_supply.h" @@ -103,7 +103,7 @@ class AddressableLight : public LightOutput, public Component { bool effect_active_{false}; }; -class AddressableLightTransformer : public LightTransitionTransformer { +class AddressableLightTransformer : public LightTransformer { public: AddressableLightTransformer(AddressableLight &light) : light_(light) {} diff --git a/esphome/components/light/light_transformer.h b/esphome/components/light/light_transformer.h index fb9b709187..a84183c03c 100644 --- a/esphome/components/light/light_transformer.h +++ b/esphome/components/light/light_transformer.h @@ -38,6 +38,10 @@ class LightTransformer { const LightColorValues &get_target_values() const { return this->target_values_; } protected: + // This looks crazy, but it reduces to 6x^5 - 15x^4 + 10x^3 which is just a smooth sigmoid-like + // transition from 0 to 1 on x = [0, 1] + static float smoothed_progress(float x) { return x * x * x * (x * (x * 6.0f - 15.0f) + 10.0f); } + /// The progress of this transition, on a scale of 0 to 1. float get_progress_() { uint32_t now = esphome::millis(); diff --git a/esphome/components/light/transformers.h b/esphome/components/light/transformers.h index 8d49acff97..71d41a66d3 100644 --- a/esphome/components/light/transformers.h +++ b/esphome/components/light/transformers.h @@ -50,15 +50,11 @@ class LightTransitionTransformer : public LightTransformer { if (this->changing_color_mode_) p = p < 0.5f ? p * 2 : (p - 0.5) * 2; - float v = LightTransitionTransformer::smoothed_progress(p); + float v = LightTransformer::smoothed_progress(p); return LightColorValues::lerp(start, end, v); } protected: - // This looks crazy, but it reduces to 6x^5 - 15x^4 + 10x^3 which is just a smooth sigmoid-like - // transition from 0 to 1 on x = [0, 1] - static float smoothed_progress(float x) { return x * x * x * (x * (x * 6.0f - 15.0f) + 10.0f); } - LightColorValues end_values_{}; LightColorValues intermediate_values_{}; bool changing_color_mode_{false}; From 9e693335b6ada2ddbfaba252831c48390ad56c1f Mon Sep 17 00:00:00 2001 From: "J. 
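For reference, the easing curve that smoothed_progress() implements (now shared via LightTransformer above) is the quintic 6x^5 - 15x^4 + 10x^3. Below is a minimal standalone sketch, not part of the patch, that evaluates the same expression at a few points; the printed values follow directly from the formula.

#include <cstdio>

// Quintic "smootherstep": zero first and second derivative at both ends, so a
// transition ramps in and out without a visible jerk.
static constexpr float smoothed_progress(float x) { return x * x * x * (x * (x * 6.0f - 15.0f) + 10.0f); }

int main() {
  for (float x = 0.0f; x <= 1.001f; x += 0.25f) {
    std::printf("x=%.2f -> %.4f\n", x, smoothed_progress(x));
  }
  // Expected output: 0.0000, ~0.1035, 0.5000, ~0.8965, 1.0000 (symmetric around the midpoint).
  return 0;
}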
Nick Koston" Date: Tue, 21 Oct 2025 10:50:29 -1000 Subject: [PATCH 254/336] [binary_sensor] Optimize MultiClickTrigger with FixedVector --- esphome/components/binary_sensor/automation.h | 8 +-- tests/components/binary_sensor/common.yaml | 66 +++++++++++++++++++ 2 files changed, 70 insertions(+), 4 deletions(-) diff --git a/esphome/components/binary_sensor/automation.h b/esphome/components/binary_sensor/automation.h index b46436dc41..0bc7b9acb3 100644 --- a/esphome/components/binary_sensor/automation.h +++ b/esphome/components/binary_sensor/automation.h @@ -2,11 +2,11 @@ #include #include -#include #include "esphome/core/component.h" #include "esphome/core/automation.h" #include "esphome/core/hal.h" +#include "esphome/core/helpers.h" #include "esphome/components/binary_sensor/binary_sensor.h" namespace esphome { @@ -92,8 +92,8 @@ class DoubleClickTrigger : public Trigger<> { class MultiClickTrigger : public Trigger<>, public Component { public: - explicit MultiClickTrigger(BinarySensor *parent, std::vector timing) - : parent_(parent), timing_(std::move(timing)) {} + explicit MultiClickTrigger(BinarySensor *parent, std::initializer_list timing) + : parent_(parent), timing_(timing) {} void setup() override { this->last_state_ = this->parent_->get_state_default(false); @@ -115,7 +115,7 @@ class MultiClickTrigger : public Trigger<>, public Component { void trigger_(); BinarySensor *parent_; - std::vector timing_; + FixedVector timing_; uint32_t invalid_cooldown_{1000}; optional at_index_{}; bool last_state_{false}; diff --git a/tests/components/binary_sensor/common.yaml b/tests/components/binary_sensor/common.yaml index 6965c1feeb..e3fd159b08 100644 --- a/tests/components/binary_sensor/common.yaml +++ b/tests/components/binary_sensor/common.yaml @@ -70,3 +70,69 @@ binary_sensor: - delay: 10s time_off: 200ms time_on: 800ms + + # Test on_multi_click with single click + - platform: template + id: multi_click_single + name: "Multi Click Single" + on_multi_click: + - timing: + - state: true + min_length: 50ms + max_length: 350ms + then: + - logger.log: "Single click detected" + + # Test on_multi_click with double click + - platform: template + id: multi_click_double + name: "Multi Click Double" + on_multi_click: + - timing: + - state: true + min_length: 50ms + max_length: 350ms + - state: false + min_length: 50ms + max_length: 350ms + - state: true + min_length: 50ms + max_length: 350ms + then: + - logger.log: "Double click detected" + + # Test on_multi_click with complex pattern (5 events) + - platform: template + id: multi_click_complex + name: "Multi Click Complex" + on_multi_click: + - timing: + - state: true + min_length: 50ms + max_length: 350ms + - state: false + min_length: 50ms + max_length: 350ms + - state: true + min_length: 50ms + max_length: 350ms + - state: false + min_length: 50ms + max_length: 350ms + - state: true + min_length: 50ms + then: + - logger.log: "Complex pattern detected" + + # Test on_multi_click with custom invalid_cooldown + - platform: template + id: multi_click_cooldown + name: "Multi Click Cooldown" + on_multi_click: + - timing: + - state: true + min_length: 100ms + max_length: 500ms + invalid_cooldown: 2s + then: + - logger.log: "Click with custom cooldown" From d6961610c7cc8126136757cda8e517d5efe73ca1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 21 Oct 2025 11:10:02 -1000 Subject: [PATCH 255/336] [light] Replace std::vector with FixedVector in strobe and color_wipe effects --- .../light/addressable_light_effect.h | 6 +-- esphome/components/light/base_light_effects.h | 6 +-- tests/components/light/common.yaml | 40 +++++++++++++++++++ 3 files changed, 46 insertions(+), 6 deletions(-) diff --git a/esphome/components/light/addressable_light_effect.h b/esphome/components/light/addressable_light_effect.h index fcf76b3cb0..9caccad634 100644 --- a/esphome/components/light/addressable_light_effect.h +++ b/esphome/components/light/addressable_light_effect.h @@ -1,9 +1,9 @@ #pragma once #include -#include #include "esphome/core/component.h" +#include "esphome/core/helpers.h" #include "esphome/components/light/light_state.h" #include "esphome/components/light/addressable_light.h" @@ -113,7 +113,7 @@ struct AddressableColorWipeEffectColor { class AddressableColorWipeEffect : public AddressableLightEffect { public: explicit AddressableColorWipeEffect(const std::string &name) : AddressableLightEffect(name) {} - void set_colors(const std::vector &colors) { this->colors_ = colors; } + void set_colors(const std::initializer_list &colors) { this->colors_ = colors; } void set_add_led_interval(uint32_t add_led_interval) { this->add_led_interval_ = add_led_interval; } void set_reverse(bool reverse) { this->reverse_ = reverse; } void apply(AddressableLight &it, const Color ¤t_color) override { @@ -155,7 +155,7 @@ class AddressableColorWipeEffect : public AddressableLightEffect { } protected: - std::vector colors_; + FixedVector colors_; size_t at_color_{0}; uint32_t last_add_{0}; uint32_t add_led_interval_{}; diff --git a/esphome/components/light/base_light_effects.h b/esphome/components/light/base_light_effects.h index ff6cd1ccfe..c74d19fe14 100644 --- a/esphome/components/light/base_light_effects.h +++ b/esphome/components/light/base_light_effects.h @@ -1,9 +1,9 @@ #pragma once #include -#include #include "esphome/core/automation.h" +#include "esphome/core/helpers.h" #include "light_effect.h" namespace esphome { @@ -188,10 +188,10 @@ class StrobeLightEffect : public LightEffect { this->last_switch_ = now; } - void set_colors(const std::vector &colors) { this->colors_ = colors; } + void set_colors(const std::initializer_list &colors) { this->colors_ = colors; } protected: - std::vector colors_; + FixedVector colors_; uint32_t last_switch_{0}; size_t at_color_{0}; }; diff --git a/tests/components/light/common.yaml b/tests/components/light/common.yaml index d4f64dcdea..f807014065 100644 --- a/tests/components/light/common.yaml +++ b/tests/components/light/common.yaml @@ -123,3 +123,43 @@ light: red: 100% green: 50% blue: 50% + # Test StrobeLightEffect with multiple colors + - platform: monochromatic + id: test_strobe_multiple + name: Strobe Multiple Colors + output: test_ledc_1 + effects: + - strobe: + name: Strobe Multi + colors: + - state: true + brightness: 100% + duration: 500ms + - state: false + duration: 250ms + - state: true + brightness: 50% + duration: 500ms + # Test StrobeLightEffect with transition + - platform: rgb + id: test_strobe_transition + name: Strobe With Transition + red: test_ledc_1 + green: test_ledc_2 + blue: test_ledc_3 + effects: + - strobe: + name: Strobe Transition + colors: + - state: true + red: 100% + green: 0% + blue: 0% + duration: 1s + transition_length: 500ms + - state: true + red: 0% + green: 100% + blue: 0% + duration: 1s + transition_length: 500ms From a05c5ea24016d4f7680f5ba632c00a1140d30b43 
Mon Sep 17 00:00:00 2001 From: Anton Sergunov Date: Wed, 22 Oct 2025 03:10:25 +0600 Subject: [PATCH 256/336] [uart] Make rx pin respect pullup and pulldown settings (#9248) --- esphome/components/uart/uart_component_esp8266.cpp | 7 +++++++ esphome/components/uart/uart_component_esp_idf.cpp | 10 ++++++++++ esphome/components/uart/uart_component_libretiny.cpp | 7 +++++++ esphome/components/uart/uart_component_rp2040.cpp | 7 +++++++ 4 files changed, 31 insertions(+) diff --git a/esphome/components/uart/uart_component_esp8266.cpp b/esphome/components/uart/uart_component_esp8266.cpp index b2bf2bacf1..7a453dbb50 100644 --- a/esphome/components/uart/uart_component_esp8266.cpp +++ b/esphome/components/uart/uart_component_esp8266.cpp @@ -56,6 +56,13 @@ uint32_t ESP8266UartComponent::get_config() { } void ESP8266UartComponent::setup() { + if (this->rx_pin_) { + this->rx_pin_->setup(); + } + if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) { + this->tx_pin_->setup(); + } + // Use Arduino HardwareSerial UARTs if all used pins match the ones // preconfigured by the platform. For example if RX disabled but TX pin // is 1 we still want to use Serial. diff --git a/esphome/components/uart/uart_component_esp_idf.cpp b/esphome/components/uart/uart_component_esp_idf.cpp index 7530856b1e..cffa3308eb 100644 --- a/esphome/components/uart/uart_component_esp_idf.cpp +++ b/esphome/components/uart/uart_component_esp_idf.cpp @@ -6,6 +6,9 @@ #include "esphome/core/defines.h" #include "esphome/core/helpers.h" #include "esphome/core/log.h" +#include "esphome/core/gpio.h" +#include "driver/gpio.h" +#include "soc/gpio_num.h" #ifdef USE_LOGGER #include "esphome/components/logger/logger.h" @@ -104,6 +107,13 @@ void IDFUARTComponent::load_settings(bool dump_config) { return; } + if (this->rx_pin_) { + this->rx_pin_->setup(); + } + if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) { + this->tx_pin_->setup(); + } + int8_t tx = this->tx_pin_ != nullptr ? this->tx_pin_->get_pin() : -1; int8_t rx = this->rx_pin_ != nullptr ? this->rx_pin_->get_pin() : -1; int8_t flow_control = this->flow_control_pin_ != nullptr ? this->flow_control_pin_->get_pin() : -1; diff --git a/esphome/components/uart/uart_component_libretiny.cpp b/esphome/components/uart/uart_component_libretiny.cpp index 8a7a301cfe..9c065fe5df 100644 --- a/esphome/components/uart/uart_component_libretiny.cpp +++ b/esphome/components/uart/uart_component_libretiny.cpp @@ -46,6 +46,13 @@ uint16_t LibreTinyUARTComponent::get_config() { } void LibreTinyUARTComponent::setup() { + if (this->rx_pin_) { + this->rx_pin_->setup(); + } + if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) { + this->tx_pin_->setup(); + } + int8_t tx_pin = tx_pin_ == nullptr ? -1 : tx_pin_->get_pin(); int8_t rx_pin = rx_pin_ == nullptr ? 
-1 : rx_pin_->get_pin(); bool tx_inverted = tx_pin_ != nullptr && tx_pin_->is_inverted(); diff --git a/esphome/components/uart/uart_component_rp2040.cpp b/esphome/components/uart/uart_component_rp2040.cpp index ae3042fb77..c78691653d 100644 --- a/esphome/components/uart/uart_component_rp2040.cpp +++ b/esphome/components/uart/uart_component_rp2040.cpp @@ -52,6 +52,13 @@ uint16_t RP2040UartComponent::get_config() { } void RP2040UartComponent::setup() { + if (this->rx_pin_) { + this->rx_pin_->setup(); + } + if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) { + this->tx_pin_->setup(); + } + uint16_t config = get_config(); constexpr uint32_t valid_tx_uart_0 = __bitset({0, 12, 16, 28}); From 548913b471a7cdd4ca412ff29807fd4c012308d9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 11:12:32 -1000 Subject: [PATCH 257/336] Add gpio switch interlock compile tests (#11449) --- tests/components/gpio/common.yaml | 17 +++++++++++++++++ tests/components/gpio/test.esp32-c3-idf.yaml | 3 +++ tests/components/gpio/test.esp32-idf.yaml | 3 +++ tests/components/gpio/test.esp8266-ard.yaml | 3 +++ tests/components/gpio/test.nrf52-adafruit.yaml | 17 +++++++++++++++++ tests/components/gpio/test.nrf52-mcumgr.yaml | 17 +++++++++++++++++ tests/components/gpio/test.rp2040-ard.yaml | 3 +++ 7 files changed, 63 insertions(+) diff --git a/tests/components/gpio/common.yaml b/tests/components/gpio/common.yaml index 4e237349d9..b8e8fa81e4 100644 --- a/tests/components/gpio/common.yaml +++ b/tests/components/gpio/common.yaml @@ -12,3 +12,20 @@ switch: - platform: gpio pin: ${switch_pin} id: gpio_switch + + - platform: gpio + pin: ${switch_pin_2} + id: gpio_switch_interlock_1 + interlock: [gpio_switch_interlock_2, gpio_switch_interlock_3] + interlock_wait_time: 100ms + + - platform: gpio + pin: ${switch_pin_3} + id: gpio_switch_interlock_2 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_3] + + - platform: gpio + pin: ${switch_pin_4} + id: gpio_switch_interlock_3 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_2] + interlock_wait_time: 50ms diff --git a/tests/components/gpio/test.esp32-c3-idf.yaml b/tests/components/gpio/test.esp32-c3-idf.yaml index fc7c9942d0..e9071b4356 100644 --- a/tests/components/gpio/test.esp32-c3-idf.yaml +++ b/tests/components/gpio/test.esp32-c3-idf.yaml @@ -2,5 +2,8 @@ substitutions: binary_sensor_pin: GPIO2 output_pin: GPIO3 switch_pin: GPIO4 + switch_pin_2: GPIO5 + switch_pin_3: GPIO6 + switch_pin_4: GPIO7 <<: !include common.yaml diff --git a/tests/components/gpio/test.esp32-idf.yaml b/tests/components/gpio/test.esp32-idf.yaml index 09f41abb79..862aa533ea 100644 --- a/tests/components/gpio/test.esp32-idf.yaml +++ b/tests/components/gpio/test.esp32-idf.yaml @@ -2,5 +2,8 @@ substitutions: binary_sensor_pin: GPIO12 output_pin: GPIO13 switch_pin: GPIO14 + switch_pin_2: GPIO15 + switch_pin_3: GPIO16 + switch_pin_4: GPIO17 <<: !include common.yaml diff --git a/tests/components/gpio/test.esp8266-ard.yaml b/tests/components/gpio/test.esp8266-ard.yaml index e1660ec47c..e13b4520d1 100644 --- a/tests/components/gpio/test.esp8266-ard.yaml +++ b/tests/components/gpio/test.esp8266-ard.yaml @@ -2,5 +2,8 @@ substitutions: binary_sensor_pin: GPIO0 output_pin: GPIO2 switch_pin: GPIO15 + switch_pin_2: GPIO12 + switch_pin_3: GPIO13 + switch_pin_4: GPIO14 <<: !include common.yaml diff --git a/tests/components/gpio/test.nrf52-adafruit.yaml b/tests/components/gpio/test.nrf52-adafruit.yaml index 912b9537c4..fb3f368e03 100644 --- 
a/tests/components/gpio/test.nrf52-adafruit.yaml +++ b/tests/components/gpio/test.nrf52-adafruit.yaml @@ -12,3 +12,20 @@ switch: - platform: gpio pin: P1.2 id: gpio_switch + + - platform: gpio + pin: P1.3 + id: gpio_switch_interlock_1 + interlock: [gpio_switch_interlock_2, gpio_switch_interlock_3] + interlock_wait_time: 100ms + + - platform: gpio + pin: P1.4 + id: gpio_switch_interlock_2 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_3] + + - platform: gpio + pin: P1.5 + id: gpio_switch_interlock_3 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_2] + interlock_wait_time: 50ms diff --git a/tests/components/gpio/test.nrf52-mcumgr.yaml b/tests/components/gpio/test.nrf52-mcumgr.yaml index 912b9537c4..fb3f368e03 100644 --- a/tests/components/gpio/test.nrf52-mcumgr.yaml +++ b/tests/components/gpio/test.nrf52-mcumgr.yaml @@ -12,3 +12,20 @@ switch: - platform: gpio pin: P1.2 id: gpio_switch + + - platform: gpio + pin: P1.3 + id: gpio_switch_interlock_1 + interlock: [gpio_switch_interlock_2, gpio_switch_interlock_3] + interlock_wait_time: 100ms + + - platform: gpio + pin: P1.4 + id: gpio_switch_interlock_2 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_3] + + - platform: gpio + pin: P1.5 + id: gpio_switch_interlock_3 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_2] + interlock_wait_time: 50ms diff --git a/tests/components/gpio/test.rp2040-ard.yaml b/tests/components/gpio/test.rp2040-ard.yaml index fc7c9942d0..e9071b4356 100644 --- a/tests/components/gpio/test.rp2040-ard.yaml +++ b/tests/components/gpio/test.rp2040-ard.yaml @@ -2,5 +2,8 @@ substitutions: binary_sensor_pin: GPIO2 output_pin: GPIO3 switch_pin: GPIO4 + switch_pin_2: GPIO5 + switch_pin_3: GPIO6 + switch_pin_4: GPIO7 <<: !include common.yaml From 742eca92d892c4001c9ccd16148c36482a0df182 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Wed, 22 Oct 2025 10:22:56 +1300 Subject: [PATCH 258/336] [CI] Add auto label for chained PRs (#11457) --- .github/workflows/auto-label-pr.yml | 7 +++++-- .github/workflows/status-check-labels.yml | 1 + 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/auto-label-pr.yml b/.github/workflows/auto-label-pr.yml index 1670bd1821..4e2f086f47 100644 --- a/.github/workflows/auto-label-pr.yml +++ b/.github/workflows/auto-label-pr.yml @@ -53,6 +53,7 @@ jobs: 'new-target-platform', 'merging-to-release', 'merging-to-beta', + 'chained-pr', 'core', 'small-pr', 'dashboard', @@ -140,6 +141,8 @@ jobs: labels.add('merging-to-release'); } else if (baseRef === 'beta') { labels.add('merging-to-beta'); + } else if (baseRef !== 'dev') { + labels.add('chained-pr'); } return labels; @@ -528,8 +531,8 @@ jobs: const apiData = await fetchApiData(); const baseRef = context.payload.pull_request.base.ref; - // Early exit for non-dev branches - if (baseRef !== 'dev') { + // Early exit for release and beta branches only + if (baseRef === 'release' || baseRef === 'beta') { const branchLabels = await detectMergeBranch(); const finalLabels = Array.from(branchLabels); diff --git a/.github/workflows/status-check-labels.yml b/.github/workflows/status-check-labels.yml index e44fd18132..cca70815b9 100644 --- a/.github/workflows/status-check-labels.yml +++ b/.github/workflows/status-check-labels.yml @@ -14,6 +14,7 @@ jobs: label: - needs-docs - merge-after-release + - chained-pr steps: - name: Check for ${{ matrix.label }} label uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 From 
f3f419077bb5d1b7d5eea3d735b8b0b4c58678c5 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 11:29:27 -1000 Subject: [PATCH 259/336] [wifi] Optimize WiFi network storage with FixedVector --- esphome/components/wifi/__init__.py | 23 +++++++++++++++------- esphome/components/wifi/wifi_component.cpp | 7 ++----- esphome/components/wifi/wifi_component.h | 4 ++-- 3 files changed, 20 insertions(+), 14 deletions(-) diff --git a/esphome/components/wifi/__init__.py b/esphome/components/wifi/__init__.py index 494470cb48..19c1f28f47 100644 --- a/esphome/components/wifi/__init__.py +++ b/esphome/components/wifi/__init__.py @@ -1,6 +1,7 @@ from esphome import automation from esphome.automation import Condition import esphome.codegen as cg +from esphome.codegen import MockObj from esphome.components.const import CONF_USE_PSRAM from esphome.components.esp32 import add_idf_sdkconfig_option, const, get_esp32_variant from esphome.components.network import IPAddress @@ -378,14 +379,22 @@ async def to_code(config): # Track if any network uses Enterprise authentication has_eap = False - def add_sta(ap, network): - ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP)) - cg.add(var.add_sta(wifi_network(network, ap, ip_config))) + # Build all WiFiAP objects + networks = config.get(CONF_NETWORKS, []) + if networks: + wifi_aps: list[MockObj] = [] + for network in networks: + if CONF_EAP in network: + has_eap = True + ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP)) + # Create a WiFiAP variable for each network + ap_var = cg.new_variable(network[CONF_ID], WiFiAP()) + # Configure the WiFiAP + wifi_network(network, ap_var, ip_config) + wifi_aps.append(ap_var) - for network in config.get(CONF_NETWORKS, []): - if CONF_EAP in network: - has_eap = True - cg.with_local_variable(network[CONF_ID], WiFiAP(), add_sta, network) + # Set all WiFi networks at once + cg.add(var.set_stas(wifi_aps)) if CONF_AP in config: conf = config[CONF_AP] diff --git a/esphome/components/wifi/wifi_component.cpp b/esphome/components/wifi/wifi_component.cpp index c89384d742..a7b66114c8 100644 --- a/esphome/components/wifi/wifi_component.cpp +++ b/esphome/components/wifi/wifi_component.cpp @@ -330,11 +330,8 @@ float WiFiComponent::get_loop_priority() const { return 10.0f; // before other loop components } -void WiFiComponent::add_sta(const WiFiAP &ap) { this->sta_.push_back(ap); } -void WiFiComponent::set_sta(const WiFiAP &ap) { - this->clear_sta(); - this->add_sta(ap); -} +void WiFiComponent::set_stas(const std::initializer_list &aps) { this->sta_ = aps; } +void WiFiComponent::set_sta(const WiFiAP &ap) { this->set_stas({ap}); } void WiFiComponent::clear_sta() { this->sta_.clear(); } void WiFiComponent::save_wifi_sta(const std::string &ssid, const std::string &password) { SavedWifiSettings save{}; // zero-initialized - all bytes set to \0, guaranteeing null termination diff --git a/esphome/components/wifi/wifi_component.h b/esphome/components/wifi/wifi_component.h index 10aa82a065..0bcfd7445a 100644 --- a/esphome/components/wifi/wifi_component.h +++ b/esphome/components/wifi/wifi_component.h @@ -219,7 +219,7 @@ class WiFiComponent : public Component { void set_sta(const WiFiAP &ap); WiFiAP get_sta() { return this->selected_ap_; } - void add_sta(const WiFiAP &ap); + void set_stas(const std::initializer_list &aps); void clear_sta(); #ifdef USE_WIFI_AP @@ -393,7 +393,7 @@ class WiFiComponent : public Component { #endif std::string use_address_; - std::vector sta_; + FixedVector sta_; std::vector 
sta_priorities_; wifi_scan_vector_t scan_result_; WiFiAP selected_ap_; From f2469077d9562c491520b0b732aba3c338ee2382 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 11:31:18 -1000 Subject: [PATCH 260/336] [light] Add tests for AddressableColorWipeEffectColor/StrobeLightEffectColor (#11456) --- tests/components/light/common.yaml | 40 ++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/tests/components/light/common.yaml b/tests/components/light/common.yaml index d4f64dcdea..f807014065 100644 --- a/tests/components/light/common.yaml +++ b/tests/components/light/common.yaml @@ -123,3 +123,43 @@ light: red: 100% green: 50% blue: 50% + # Test StrobeLightEffect with multiple colors + - platform: monochromatic + id: test_strobe_multiple + name: Strobe Multiple Colors + output: test_ledc_1 + effects: + - strobe: + name: Strobe Multi + colors: + - state: true + brightness: 100% + duration: 500ms + - state: false + duration: 250ms + - state: true + brightness: 50% + duration: 500ms + # Test StrobeLightEffect with transition + - platform: rgb + id: test_strobe_transition + name: Strobe With Transition + red: test_ledc_1 + green: test_ledc_2 + blue: test_ledc_3 + effects: + - strobe: + name: Strobe Transition + colors: + - state: true + red: 100% + green: 0% + blue: 0% + duration: 1s + transition_length: 500ms + - state: true + red: 0% + green: 100% + blue: 0% + duration: 1s + transition_length: 500ms From 9922c6591223821fe622cf71530544393c03bdb0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 11:32:48 -1000 Subject: [PATCH 261/336] Add compile tests for binary_sensor MultiClickTrigger (#11454) Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com> --- tests/components/binary_sensor/common.yaml | 66 ++++++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/tests/components/binary_sensor/common.yaml b/tests/components/binary_sensor/common.yaml index 6965c1feeb..e3fd159b08 100644 --- a/tests/components/binary_sensor/common.yaml +++ b/tests/components/binary_sensor/common.yaml @@ -70,3 +70,69 @@ binary_sensor: - delay: 10s time_off: 200ms time_on: 800ms + + # Test on_multi_click with single click + - platform: template + id: multi_click_single + name: "Multi Click Single" + on_multi_click: + - timing: + - state: true + min_length: 50ms + max_length: 350ms + then: + - logger.log: "Single click detected" + + # Test on_multi_click with double click + - platform: template + id: multi_click_double + name: "Multi Click Double" + on_multi_click: + - timing: + - state: true + min_length: 50ms + max_length: 350ms + - state: false + min_length: 50ms + max_length: 350ms + - state: true + min_length: 50ms + max_length: 350ms + then: + - logger.log: "Double click detected" + + # Test on_multi_click with complex pattern (5 events) + - platform: template + id: multi_click_complex + name: "Multi Click Complex" + on_multi_click: + - timing: + - state: true + min_length: 50ms + max_length: 350ms + - state: false + min_length: 50ms + max_length: 350ms + - state: true + min_length: 50ms + max_length: 350ms + - state: false + min_length: 50ms + max_length: 350ms + - state: true + min_length: 50ms + then: + - logger.log: "Complex pattern detected" + + # Test on_multi_click with custom invalid_cooldown + - platform: template + id: multi_click_cooldown + name: "Multi Click Cooldown" + on_multi_click: + - timing: + - state: true + min_length: 100ms + max_length: 500ms + invalid_cooldown: 2s + then: + - logger.log: "Click with 
custom cooldown" From 6f7db2f5f77a7fb32e938588f52ec910c16b50d8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 11:35:34 -1000 Subject: [PATCH 262/336] [gpio] Optimize switch interlock with FixedVector (#11448) Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com> --- esphome/components/gpio/switch/gpio_switch.cpp | 2 +- esphome/components/gpio/switch/gpio_switch.h | 7 +++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/esphome/components/gpio/switch/gpio_switch.cpp b/esphome/components/gpio/switch/gpio_switch.cpp index b67af5e95d..9043a6a493 100644 --- a/esphome/components/gpio/switch/gpio_switch.cpp +++ b/esphome/components/gpio/switch/gpio_switch.cpp @@ -67,7 +67,7 @@ void GPIOSwitch::write_state(bool state) { this->pin_->digital_write(state); this->publish_state(state); } -void GPIOSwitch::set_interlock(const std::vector &interlock) { this->interlock_ = interlock; } +void GPIOSwitch::set_interlock(const std::initializer_list &interlock) { this->interlock_ = interlock; } } // namespace gpio } // namespace esphome diff --git a/esphome/components/gpio/switch/gpio_switch.h b/esphome/components/gpio/switch/gpio_switch.h index 94d49745b5..080decac08 100644 --- a/esphome/components/gpio/switch/gpio_switch.h +++ b/esphome/components/gpio/switch/gpio_switch.h @@ -2,10 +2,9 @@ #include "esphome/core/component.h" #include "esphome/core/hal.h" +#include "esphome/core/helpers.h" #include "esphome/components/switch/switch.h" -#include - namespace esphome { namespace gpio { @@ -19,14 +18,14 @@ class GPIOSwitch : public switch_::Switch, public Component { void setup() override; void dump_config() override; - void set_interlock(const std::vector &interlock); + void set_interlock(const std::initializer_list &interlock); void set_interlock_wait_time(uint32_t interlock_wait_time) { interlock_wait_time_ = interlock_wait_time; } protected: void write_state(bool state) override; GPIOPin *pin_; - std::vector interlock_; + FixedVector interlock_; uint32_t interlock_wait_time_{0}; }; From 88e3f02c9c3c505667e7bf86dc5dde029b71a98d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 21 Oct 2025 11:40:48 -1000 Subject: [PATCH 263/336] try to avoid some of the ram --- esphome/core/helpers.h | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/esphome/core/helpers.h b/esphome/core/helpers.h index 234d2a7d7d..9b0591c9c5 100644 --- a/esphome/core/helpers.h +++ b/esphome/core/helpers.h @@ -194,12 +194,8 @@ template class FixedVector { size_ = 0; } - public: - FixedVector() = default; - - /// Constructor from initializer list - allocates exact size needed - /// This enables brace initialization: FixedVector v = {1, 2, 3}; - FixedVector(std::initializer_list init_list) { + // Helper to assign from initializer list (shared by constructor and assignment operator) + void assign_from_initializer_list_(std::initializer_list init_list) { init(init_list.size()); size_t idx = 0; for (const auto &item : init_list) { @@ -209,6 +205,13 @@ template class FixedVector { size_ = init_list.size(); } + public: + FixedVector() = default; + + /// Constructor from initializer list - allocates exact size needed + /// This enables brace initialization: FixedVector v = {1, 2, 3}; + FixedVector(std::initializer_list init_list) { assign_from_initializer_list_(init_list); } + ~FixedVector() { cleanup_(); } // Disable copy operations (avoid accidental expensive copies) @@ -234,6 +237,15 @@ template class FixedVector { return *this; } + /// Assignment from initializer list - avoids temporary and move overhead + /// This enables: FixedVector v; v = {1, 2, 3}; + FixedVector &operator=(std::initializer_list init_list) { + cleanup_(); + reset_(); + assign_from_initializer_list_(init_list); + return *this; + } + // Allocate capacity - can be called multiple times to reinit void init(size_t n) { cleanup_(); From 660411ac42b1304b15965d7af004c247015eb72d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 11:44:56 -1000 Subject: [PATCH 264/336] try to avoid some of the ram --- esphome/components/wifi/__init__.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/esphome/components/wifi/__init__.py b/esphome/components/wifi/__init__.py index 19c1f28f47..97f517713d 100644 --- a/esphome/components/wifi/__init__.py +++ b/esphome/components/wifi/__init__.py @@ -1,7 +1,6 @@ from esphome import automation from esphome.automation import Condition import esphome.codegen as cg -from esphome.codegen import MockObj from esphome.components.const import CONF_USE_PSRAM from esphome.components.esp32 import add_idf_sdkconfig_option, const, get_esp32_variant from esphome.components.network import IPAddress @@ -379,19 +378,16 @@ async def to_code(config): # Track if any network uses Enterprise authentication has_eap = False - # Build all WiFiAP objects + # Build all WiFiAP objects as StructInitializers (not variables) networks = config.get(CONF_NETWORKS, []) if networks: - wifi_aps: list[MockObj] = [] + wifi_aps = [] for network in networks: if CONF_EAP in network: has_eap = True ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP)) - # Create a WiFiAP variable for each network - ap_var = cg.new_variable(network[CONF_ID], WiFiAP()) - # Configure the WiFiAP - wifi_network(network, ap_var, ip_config) - wifi_aps.append(ap_var) + # Create StructInitializer for each network (avoids global variables) + wifi_aps.append(wifi_network(network, WiFiAP(), ip_config)) # Set all WiFi networks at once cg.add(var.set_stas(wifi_aps)) From 294826491779c4bb6f9b11c3f248fc008fa948f7 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 21 Oct 2025 11:46:30 -1000 Subject: [PATCH 265/336] try to avoid some of the ram --- esphome/components/wifi/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/esphome/components/wifi/__init__.py b/esphome/components/wifi/__init__.py index 97f517713d..76155763fb 100644 --- a/esphome/components/wifi/__init__.py +++ b/esphome/components/wifi/__init__.py @@ -378,7 +378,7 @@ async def to_code(config): # Track if any network uses Enterprise authentication has_eap = False - # Build all WiFiAP objects as StructInitializers (not variables) + # Build all WiFiAP objects networks = config.get(CONF_NETWORKS, []) if networks: wifi_aps = [] @@ -386,7 +386,6 @@ async def to_code(config): if CONF_EAP in network: has_eap = True ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP)) - # Create StructInitializer for each network (avoids global variables) wifi_aps.append(wifi_network(network, WiFiAP(), ip_config)) # Set all WiFi networks at once From 8500323d397cefa73dca7951418ff41e2f3857e5 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 11:47:31 -1000 Subject: [PATCH 266/336] [esp32] Add advanced options to disable unused VFS features (saves ~8.7 KB flash) (#11441) --- esphome/components/esp32/__init__.py | 70 +++++++++++++++++++++++ esphome/components/openthread/__init__.py | 10 ++++ 2 files changed, 80 insertions(+) diff --git a/esphome/components/esp32/__init__.py b/esphome/components/esp32/__init__.py index 99a87e06f9..cb6354cc74 100644 --- a/esphome/components/esp32/__init__.py +++ b/esphome/components/esp32/__init__.py @@ -550,6 +550,32 @@ CONF_ENABLE_LWIP_BRIDGE_INTERFACE = "enable_lwip_bridge_interface" CONF_ENABLE_LWIP_TCPIP_CORE_LOCKING = "enable_lwip_tcpip_core_locking" CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY = "enable_lwip_check_thread_safety" CONF_DISABLE_LIBC_LOCKS_IN_IRAM = "disable_libc_locks_in_iram" +CONF_DISABLE_VFS_SUPPORT_TERMIOS = "disable_vfs_support_termios" +CONF_DISABLE_VFS_SUPPORT_SELECT = "disable_vfs_support_select" +CONF_DISABLE_VFS_SUPPORT_DIR = "disable_vfs_support_dir" + +# VFS requirement tracking +# Components that need VFS features can call require_vfs_select() or require_vfs_dir() +KEY_VFS_SELECT_REQUIRED = "vfs_select_required" +KEY_VFS_DIR_REQUIRED = "vfs_dir_required" + + +def require_vfs_select() -> None: + """Mark that VFS select support is required by a component. + + Call this from components that use esp_vfs_eventfd or other VFS select features. + This prevents CONFIG_VFS_SUPPORT_SELECT from being disabled. + """ + CORE.data[KEY_VFS_SELECT_REQUIRED] = True + + +def require_vfs_dir() -> None: + """Mark that VFS directory support is required by a component. + + Call this from components that use directory functions (opendir, readdir, mkdir, etc.). + This prevents CONFIG_VFS_SUPPORT_DIR from being disabled. 
+ """ + CORE.data[KEY_VFS_DIR_REQUIRED] = True def _validate_idf_component(config: ConfigType) -> ConfigType: @@ -615,6 +641,13 @@ FRAMEWORK_SCHEMA = cv.All( cv.Optional( CONF_DISABLE_LIBC_LOCKS_IN_IRAM, default=True ): cv.boolean, + cv.Optional( + CONF_DISABLE_VFS_SUPPORT_TERMIOS, default=True + ): cv.boolean, + cv.Optional( + CONF_DISABLE_VFS_SUPPORT_SELECT, default=True + ): cv.boolean, + cv.Optional(CONF_DISABLE_VFS_SUPPORT_DIR, default=True): cv.boolean, cv.Optional(CONF_EXECUTE_FROM_PSRAM): cv.boolean, } ), @@ -962,6 +995,43 @@ async def to_code(config): if advanced.get(CONF_DISABLE_LIBC_LOCKS_IN_IRAM, True): add_idf_sdkconfig_option("CONFIG_LIBC_LOCKS_PLACE_IN_IRAM", False) + # Disable VFS support for termios (terminal I/O functions) + # ESPHome doesn't use termios functions on ESP32 (only used in host UART driver). + # Saves approximately 1.8KB of flash when disabled (default). + add_idf_sdkconfig_option( + "CONFIG_VFS_SUPPORT_TERMIOS", + not advanced.get(CONF_DISABLE_VFS_SUPPORT_TERMIOS, True), + ) + + # Disable VFS support for select() with file descriptors + # ESPHome only uses select() with sockets via lwip_select(), which still works. + # VFS select is only needed for UART/eventfd file descriptors. + # Components that need it (e.g., openthread) call require_vfs_select(). + # Saves approximately 2.7KB of flash when disabled (default). + if CORE.data.get(KEY_VFS_SELECT_REQUIRED, False): + # Component requires VFS select - force enable regardless of user setting + add_idf_sdkconfig_option("CONFIG_VFS_SUPPORT_SELECT", True) + else: + # No component needs it - allow user to control (default: disabled) + add_idf_sdkconfig_option( + "CONFIG_VFS_SUPPORT_SELECT", + not advanced.get(CONF_DISABLE_VFS_SUPPORT_SELECT, True), + ) + + # Disable VFS support for directory functions (opendir, readdir, mkdir, etc.) + # ESPHome doesn't use directory functions on ESP32. + # Components that need it (e.g., storage components) call require_vfs_dir(). + # Saves approximately 0.5KB+ of flash when disabled (default). 
+ if CORE.data.get(KEY_VFS_DIR_REQUIRED, False): + # Component requires VFS directory support - force enable regardless of user setting + add_idf_sdkconfig_option("CONFIG_VFS_SUPPORT_DIR", True) + else: + # No component needs it - allow user to control (default: disabled) + add_idf_sdkconfig_option( + "CONFIG_VFS_SUPPORT_DIR", + not advanced.get(CONF_DISABLE_VFS_SUPPORT_DIR, True), + ) + cg.add_platformio_option("board_build.partitions", "partitions.csv") if CONF_PARTITIONS in config: add_extra_build_file( diff --git a/esphome/components/openthread/__init__.py b/esphome/components/openthread/__init__.py index 3fac497c3d..4865399d02 100644 --- a/esphome/components/openthread/__init__.py +++ b/esphome/components/openthread/__init__.py @@ -4,6 +4,7 @@ from esphome.components.esp32 import ( VARIANT_ESP32H2, add_idf_sdkconfig_option, only_on_variant, + require_vfs_select, ) from esphome.components.mdns import MDNSComponent, enable_mdns_storage import esphome.config_validation as cv @@ -106,6 +107,14 @@ _CONNECTION_SCHEMA = cv.Schema( } ) + +def _require_vfs_select(config): + """Register VFS select requirement during config validation.""" + # OpenThread uses esp_vfs_eventfd which requires VFS select support + require_vfs_select() + return config + + CONFIG_SCHEMA = cv.All( cv.Schema( { @@ -122,6 +131,7 @@ CONFIG_SCHEMA = cv.All( cv.has_exactly_one_key(CONF_NETWORK_KEY, CONF_TLV), cv.only_with_esp_idf, only_on_variant(supported=[VARIANT_ESP32C6, VARIANT_ESP32H2]), + _require_vfs_select, ) From 02e1ed21308ec73a56be54cc282dc17d5d3c421a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 11:57:06 -1000 Subject: [PATCH 267/336] multiple networks --- tests/components/wifi/common.yaml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/components/wifi/common.yaml b/tests/components/wifi/common.yaml index 343d44b177..af27f85092 100644 --- a/tests/components/wifi/common.yaml +++ b/tests/components/wifi/common.yaml @@ -12,5 +12,8 @@ esphome: - logger.log: "Failed to connect to WiFi!" wifi: - ssid: MySSID - password: password1 + networks: + - ssid: MySSID + password: password1 + - ssid: MySSID2 + password: password2 From 1ea80594c66bd7fb3af2fb326fe853e6ad85f853 Mon Sep 17 00:00:00 2001 From: Jeff Brown Date: Tue, 21 Oct 2025 15:11:11 -0700 Subject: [PATCH 268/336] [light] Improve gamma correction precision (#11141) Co-authored-by: J. Nick Koston Co-authored-by: J. 
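Before the gamma-correction diff that follows, a quick numeric check of the precision claim (standalone, not ESPHome code): truncating to 8 bits between the two scales can lose a step that the fused 16-bit-shift version keeps. The two helpers are copied from the diff below; the sample inputs are arbitrary.

#include <cstdint>
#include <cstdio>

static constexpr uint8_t esp_scale8(uint8_t i, uint8_t scale) { return (uint16_t(i) * (1 + uint16_t(scale))) / 256; }

static constexpr uint8_t esp_scale8_twice(uint8_t i, uint8_t scale1, uint8_t scale2) {
  return (uint32_t(i) * (1 + uint32_t(scale1)) * (1 + uint32_t(scale2))) >> 16;
}

int main() {
  const uint8_t value = 5, max_brightness = 127, local_brightness = 204;
  // The exact result is about 2.0. The nested call truncates 2.5 down to 2 before the
  // second scale and ends up at 1; the fused version keeps the intermediate precision
  // and lands on 2.
  std::printf("nested: %d\n", esp_scale8(esp_scale8(value, max_brightness), local_brightness));
  std::printf("fused:  %d\n", esp_scale8_twice(value, max_brightness, local_brightness));
  return 0;
}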
Nick Koston --- esphome/components/light/esp_color_correction.h | 8 ++++---- esphome/core/color.h | 9 +++++++++ 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/esphome/components/light/esp_color_correction.h b/esphome/components/light/esp_color_correction.h index 979a1acb07..14c065058c 100644 --- a/esphome/components/light/esp_color_correction.h +++ b/esphome/components/light/esp_color_correction.h @@ -17,19 +17,19 @@ class ESPColorCorrection { this->color_correct_blue(color.blue), this->color_correct_white(color.white)); } inline uint8_t color_correct_red(uint8_t red) const ESPHOME_ALWAYS_INLINE { - uint8_t res = esp_scale8(esp_scale8(red, this->max_brightness_.red), this->local_brightness_); + uint8_t res = esp_scale8_twice(red, this->max_brightness_.red, this->local_brightness_); return this->gamma_table_[res]; } inline uint8_t color_correct_green(uint8_t green) const ESPHOME_ALWAYS_INLINE { - uint8_t res = esp_scale8(esp_scale8(green, this->max_brightness_.green), this->local_brightness_); + uint8_t res = esp_scale8_twice(green, this->max_brightness_.green, this->local_brightness_); return this->gamma_table_[res]; } inline uint8_t color_correct_blue(uint8_t blue) const ESPHOME_ALWAYS_INLINE { - uint8_t res = esp_scale8(esp_scale8(blue, this->max_brightness_.blue), this->local_brightness_); + uint8_t res = esp_scale8_twice(blue, this->max_brightness_.blue, this->local_brightness_); return this->gamma_table_[res]; } inline uint8_t color_correct_white(uint8_t white) const ESPHOME_ALWAYS_INLINE { - uint8_t res = esp_scale8(esp_scale8(white, this->max_brightness_.white), this->local_brightness_); + uint8_t res = esp_scale8_twice(white, this->max_brightness_.white, this->local_brightness_); return this->gamma_table_[res]; } inline Color color_uncorrect(Color color) const ESPHOME_ALWAYS_INLINE { diff --git a/esphome/core/color.h b/esphome/core/color.h index 5dce58a485..4b0ae5b57a 100644 --- a/esphome/core/color.h +++ b/esphome/core/color.h @@ -14,6 +14,15 @@ inline static constexpr uint8_t esp_scale8(uint8_t i, uint8_t scale) { return (uint16_t(i) * (1 + uint16_t(scale))) / 256; } +/// Scale an 8-bit value by two 8-bit scale factors with improved precision. +/// This is more accurate than calling esp_scale8() twice because it delays +/// truncation until after both multiplications, preserving intermediate precision. +/// For example: esp_scale8_twice(value, max_brightness, local_brightness) +/// gives better results than esp_scale8(esp_scale8(value, max_brightness), local_brightness) +inline static constexpr uint8_t esp_scale8_twice(uint8_t i, uint8_t scale1, uint8_t scale2) { + return (uint32_t(i) * (1 + uint32_t(scale1)) * (1 + uint32_t(scale2))) >> 16; +} + struct Color { union { struct { From 3f76a67c6564201ffae7ee9ad834a2d2ea686b11 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 12:17:16 -1000 Subject: [PATCH 269/336] [wifi] Test multiple stas in wifi compile tests --- tests/components/wifi/common.yaml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/components/wifi/common.yaml b/tests/components/wifi/common.yaml index 343d44b177..af27f85092 100644 --- a/tests/components/wifi/common.yaml +++ b/tests/components/wifi/common.yaml @@ -12,5 +12,8 @@ esphome: - logger.log: "Failed to connect to WiFi!" 
wifi: - ssid: MySSID - password: password1 + networks: + - ssid: MySSID + password: password1 + - ssid: MySSID2 + password: password2 From ae50a09b4e26c8acee06242ca7171a128beae242 Mon Sep 17 00:00:00 2001 From: Javier Peletier Date: Wed, 22 Oct 2025 00:21:22 +0200 Subject: [PATCH 270/336] C++ components unit test framework (#9284) Co-authored-by: J. Nick Koston Co-authored-by: J. Nick Koston --- .clang-tidy.hash | 2 +- .github/workflows/ci.yml | 31 ++++ platformio.ini | 4 + script/cpp_unit_test.py | 172 +++++++++++++++++++++++ script/determine-jobs.py | 53 ++++++- script/extract_automations.py | 11 +- script/helpers.py | 114 ++++++++++++++- script/list-components.py | 43 ++++-- tests/components/.gitignore | 5 + tests/components/README.md | 32 +++++ tests/components/main.cpp | 26 ++++ tests/components/uart/common.h | 37 +++++ tests/components/uart/uart_component.cpp | 73 ++++++++++ tests/components/uart/uart_device.cpp | 108 ++++++++++++++ tests/script/test_determine_jobs.py | 79 +++++------ 15 files changed, 710 insertions(+), 80 deletions(-) create mode 100755 script/cpp_unit_test.py create mode 100644 tests/components/.gitignore create mode 100644 tests/components/README.md create mode 100644 tests/components/main.cpp create mode 100644 tests/components/uart/common.h create mode 100644 tests/components/uart/uart_component.cpp create mode 100644 tests/components/uart/uart_device.cpp diff --git a/.clang-tidy.hash b/.clang-tidy.hash index 2cd4319325..3ade00f0cd 100644 --- a/.clang-tidy.hash +++ b/.clang-tidy.hash @@ -1 +1 @@ -d7693a1e996cacd4a3d1c9a16336799c2a8cc3db02e4e74084151ce964581248 +3d46b63015d761c85ca9cb77ab79a389509e5776701fb22aed16e7b79d432c0c diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f085aedcc0..cb04f6bf8d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -178,6 +178,8 @@ jobs: component-test-count: ${{ steps.determine.outputs.component-test-count }} changed-cpp-file-count: ${{ steps.determine.outputs.changed-cpp-file-count }} memory_impact: ${{ steps.determine.outputs.memory-impact }} + cpp-unit-tests-run-all: ${{ steps.determine.outputs.cpp-unit-tests-run-all }} + cpp-unit-tests-components: ${{ steps.determine.outputs.cpp-unit-tests-components }} steps: - name: Check out code from GitHub uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 @@ -210,6 +212,8 @@ jobs: echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT echo "changed-cpp-file-count=$(echo "$output" | jq -r '.changed_cpp_file_count')" >> $GITHUB_OUTPUT echo "memory-impact=$(echo "$output" | jq -c '.memory_impact')" >> $GITHUB_OUTPUT + echo "cpp-unit-tests-run-all=$(echo "$output" | jq -r '.cpp_unit_tests_run_all')" >> $GITHUB_OUTPUT + echo "cpp-unit-tests-components=$(echo "$output" | jq -c '.cpp_unit_tests_components')" >> $GITHUB_OUTPUT integration-tests: name: Run integration tests @@ -247,6 +251,33 @@ jobs: . 
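To show the kind of file the new framework compiles, here is a minimal sketch of a component test in the style this patch introduces: plain GoogleTest cases built for the host target. The parity_bit() helper is invented for the example; real test files include the headers of the component under test, and main() comes from the tests/components/main.cpp added below, which presumably initializes GoogleTest and runs the registered cases.

#include <cstdint>
#include <gtest/gtest.h>

// Invented helper standing in for component code.
static int parity_bit(uint8_t byte) {
  int ones = 0;
  for (int i = 0; i < 8; i++)
    ones += (byte >> i) & 1;
  return ones & 1;
}

TEST(UartDemo, ParityOfZeroIsEven) { EXPECT_EQ(parity_bit(0x00), 0); }

TEST(UartDemo, ParityCountsSetBits) {
  EXPECT_EQ(parity_bit(0b00000111), 1);  // three set bits, odd parity
  EXPECT_EQ(parity_bit(0b00001111), 0);  // four set bits, even parity
}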
venv/bin/activate pytest -vv --no-cov --tb=native -n auto tests/integration/ + cpp-unit-tests: + name: Run C++ unit tests + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + if: github.event_name == 'pull_request' && (needs.determine-jobs.outputs.cpp-unit-tests-run-all == 'true' || needs.determine-jobs.outputs.cpp-unit-tests-components != '[]') + steps: + - name: Check out code from GitHub + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + + - name: Restore Python + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + + - name: Run cpp_unit_test.py + run: | + . venv/bin/activate + if [ "${{ needs.determine-jobs.outputs.cpp-unit-tests-run-all }}" = "true" ]; then + script/cpp_unit_test.py --all + else + ARGS=$(echo '${{ needs.determine-jobs.outputs.cpp-unit-tests-components }}' | jq -r '.[] | @sh' | xargs) + script/cpp_unit_test.py $ARGS + fi + clang-tidy-single: name: ${{ matrix.name }} runs-on: ubuntu-24.04 diff --git a/platformio.ini b/platformio.ini index 6b2a8657bb..94f58f84ab 100644 --- a/platformio.ini +++ b/platformio.ini @@ -46,6 +46,10 @@ lib_deps = ; This is using the repository until a new release is published to PlatformIO https://github.com/Sensirion/arduino-gas-index-algorithm.git#3.2.1 ; Sensirion Gas Index Algorithm Arduino Library lvgl/lvgl@8.4.0 ; lvgl + ; This dependency is used only in unit tests. + ; Must coincide with PLATFORMIO_GOOGLE_TEST_LIB in scripts/cpp_unit_test.py + ; See scripts/cpp_unit_test.py and tests/components/README.md + google/googletest@^1.15.2 build_flags = -DESPHOME_LOG_LEVEL=ESPHOME_LOG_LEVEL_VERY_VERBOSE -std=gnu++20 diff --git a/script/cpp_unit_test.py b/script/cpp_unit_test.py new file mode 100755 index 0000000000..e97b5bd7b0 --- /dev/null +++ b/script/cpp_unit_test.py @@ -0,0 +1,172 @@ +#!/usr/bin/env python3 +import argparse +import hashlib +import os +from pathlib import Path +import subprocess +import sys + +from helpers import get_all_components, get_all_dependencies, root_path + +from esphome.__main__ import command_compile, parse_args +from esphome.config import validate_config +from esphome.core import CORE +from esphome.platformio_api import get_idedata + +# This must coincide with the version in /platformio.ini +PLATFORMIO_GOOGLE_TEST_LIB = "google/googletest@^1.15.2" + +# Path to /tests/components +COMPONENTS_TESTS_DIR: Path = Path(root_path) / "tests" / "components" + + +def hash_components(components: list[str]) -> str: + key = ",".join(components) + return hashlib.sha256(key.encode()).hexdigest()[:16] + + +def filter_components_without_tests(components: list[str]) -> list[str]: + """Filter out components that do not have a corresponding test file. + + This is done by checking if the component's directory contains at + least a .cpp file. + """ + filtered_components: list[str] = [] + for component in components: + test_dir = COMPONENTS_TESTS_DIR / component + if test_dir.is_dir() and any(test_dir.glob("*.cpp")): + filtered_components.append(component) + else: + print( + f"WARNING: No tests found for component '{component}', skipping.", + file=sys.stderr, + ) + return filtered_components + + +def create_test_config(config_name: str, includes: list[str]) -> dict: + """Create ESPHome test configuration for C++ unit tests. 
+ + Args: + config_name: Unique name for this test configuration + includes: List of include folders for the test build + + Returns: + Configuration dict for ESPHome + """ + return { + "esphome": { + "name": config_name, + "friendly_name": "CPP Unit Tests", + "libraries": PLATFORMIO_GOOGLE_TEST_LIB, + "platformio_options": { + "build_type": "debug", + "build_unflags": [ + "-Os", # remove size-opt flag + ], + "build_flags": [ + "-Og", # optimize for debug + ], + "debug_build_flags": [ # only for debug builds + "-g3", # max debug info + "-ggdb3", + ], + }, + "includes": includes, + }, + "host": {}, + "logger": {"level": "DEBUG"}, + } + + +def run_tests(selected_components: list[str]) -> int: + # Skip tests on Windows + if os.name == "nt": + print("Skipping esphome tests on Windows", file=sys.stderr) + return 1 + + # Remove components that do not have tests + components = filter_components_without_tests(selected_components) + + if len(components) == 0: + print( + "No components specified or no tests found for the specified components.", + file=sys.stderr, + ) + return 0 + + components = sorted(components) + + # Obtain possible dependencies for the requested components: + components_with_dependencies = sorted(get_all_dependencies(set(components))) + + # Build a list of include folders, one folder per component containing tests. + # A special replacement main.cpp is located in /tests/components/main.cpp + includes: list[str] = ["main.cpp"] + components + + # Create a unique name for this config based on the actual components being tested + # to maximize cache during testing + config_name: str = "cpptests-" + hash_components(components) + + config = create_test_config(config_name, includes) + + CORE.config_path = COMPONENTS_TESTS_DIR / "dummy.yaml" + CORE.dashboard = None + + # Validate config will expand the above with defaults: + config = validate_config(config, {}) + + # Add all components and dependencies to the base configuration after validation, so their files + # are added to the build. + config.update({key: {} for key in components_with_dependencies}) + + print(f"Testing components: {', '.join(components)}") + CORE.config = config + args = parse_args(["program", "compile", str(CORE.config_path)]) + try: + exit_code: int = command_compile(args, config) + + if exit_code != 0: + print(f"Error compiling unit tests for {', '.join(components)}") + return exit_code + except Exception as e: + print( + f"Error compiling unit tests for {', '.join(components)}. Check path. : {e}" + ) + return 2 + + # After a successful compilation, locate the executable and run it: + idedata = get_idedata(config) + if idedata is None: + print("Cannot find executable") + return 1 + + program_path: str = idedata.raw["prog_path"] + run_cmd: list[str] = [program_path] + run_proc = subprocess.run(run_cmd, check=False) + return run_proc.returncode + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Run C++ unit tests for ESPHome components." + ) + parser.add_argument( + "components", + nargs="*", + help="List of components to test. 
Use --all to test all known components.", + ) + parser.add_argument("--all", action="store_true", help="Test all known components.") + + args = parser.parse_args() + + if args.all: + components: list[str] = get_all_components() + else: + components: list[str] = args.components + + sys.exit(run_tests(components)) + + +if __name__ == "__main__": + main() diff --git a/script/determine-jobs.py b/script/determine-jobs.py index 9721fd9756..6651553ce7 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -52,13 +52,16 @@ from helpers import ( CPP_FILE_EXTENSIONS, PYTHON_FILE_EXTENSIONS, changed_files, - filter_component_files, + core_changed, + filter_component_and_test_cpp_files, + filter_component_and_test_files, get_all_dependencies, get_changed_components, get_component_from_path, get_component_test_files, get_components_from_integration_fixtures, get_components_with_dependencies, + get_cpp_changed_components, git_ls_files, parse_test_filename, root_path, @@ -143,10 +146,9 @@ def should_run_integration_tests(branch: str | None = None) -> bool: """ files = changed_files(branch) - # Check if any core files changed (esphome/core/*) - for file in files: - if file.startswith("esphome/core/"): - return True + if core_changed(files): + # If any core files changed, run integration tests + return True # Check if any integration test files changed if any("tests/integration" in file for file in files): @@ -283,6 +285,40 @@ def should_run_python_linters(branch: str | None = None) -> bool: return _any_changed_file_endswith(branch, PYTHON_FILE_EXTENSIONS) +def determine_cpp_unit_tests( + branch: str | None = None, +) -> tuple[bool, list[str]]: + """Determine if C++ unit tests should run based on changed files. + + This function is used by the CI workflow to skip C++ unit tests when + no relevant files have changed, saving CI time and resources. + + C++ unit tests will run when any of the following conditions are met: + + 1. Any C++ core source files changed (esphome/core/*), in which case + all cpp unit tests run. + 2. A test file for a component changed, which triggers tests for that + component. + 3. The code for a component changed, which triggers tests for that + component and all components that depend on it. + + Args: + branch: Branch to compare against. If None, uses default. 
+ + Returns: + Tuple of (run_all, components) where: + - run_all: True if all tests should run, False otherwise + - components: List of specific components to test (empty if run_all) + """ + files = changed_files(branch) + if core_changed(files): + return (True, []) + + # Filter to only C++ files + cpp_files = list(filter(filter_component_and_test_cpp_files, files)) + return (False, get_cpp_changed_components(cpp_files)) + + def _any_changed_file_endswith(branch: str | None, extensions: tuple[str, ...]) -> bool: """Check if a changed file ends with any of the specified extensions.""" return any(file.endswith(extensions) for file in changed_files(branch)) @@ -579,7 +615,7 @@ def main() -> None: else: # Get both directly changed and all changed (with dependencies) changed = changed_files(args.branch) - component_files = [f for f in changed if filter_component_files(f)] + component_files = [f for f in changed if filter_component_and_test_files(f)] directly_changed_components = get_components_with_dependencies( component_files, False @@ -646,6 +682,9 @@ def main() -> None: files_to_check_count = 0 # Build output + # Determine which C++ unit tests to run + cpp_run_all, cpp_components = determine_cpp_unit_tests(args.branch) + output: dict[str, Any] = { "integration_tests": run_integration, "clang_tidy": run_clang_tidy, @@ -661,6 +700,8 @@ def main() -> None: "dependency_only_count": len(dependency_only_components), "changed_cpp_file_count": changed_cpp_file_count, "memory_impact": memory_impact, + "cpp_unit_tests_run_all": cpp_run_all, + "cpp_unit_tests_components": cpp_components, } # Output as JSON diff --git a/script/extract_automations.py b/script/extract_automations.py index 943eb7110a..4e650ce25f 100755 --- a/script/extract_automations.py +++ b/script/extract_automations.py @@ -2,19 +2,14 @@ import json -from helpers import git_ls_files +from helpers import get_all_component_files, get_components_with_dependencies from esphome.automation import ACTION_REGISTRY, CONDITION_REGISTRY from esphome.pins import PIN_SCHEMA_REGISTRY -list_components = __import__("list-components") - - if __name__ == "__main__": - files = git_ls_files() - files = filter(list_components.filter_component_files, files) - - components = list_components.get_components(files, True) + files = get_all_component_files() + components = get_components_with_dependencies(files, True) dump = { "actions": sorted(list(ACTION_REGISTRY.keys())), diff --git a/script/helpers.py b/script/helpers.py index 6b2bb2daef..78c11b427e 100644 --- a/script/helpers.py +++ b/script/helpers.py @@ -25,12 +25,21 @@ CPP_FILE_EXTENSIONS = (".cpp", ".h", ".hpp", ".cc", ".cxx", ".c", ".tcc") # Python file extensions PYTHON_FILE_EXTENSIONS = (".py", ".pyi") +# Combined C++ and Python file extensions for convenience +CPP_AND_PYTHON_FILE_EXTENSIONS = (*CPP_FILE_EXTENSIONS, *PYTHON_FILE_EXTENSIONS) + # YAML file extensions YAML_FILE_EXTENSIONS = (".yaml", ".yml") # Component path prefix ESPHOME_COMPONENTS_PATH = "esphome/components/" +# Test components path prefix +ESPHOME_TESTS_COMPONENTS_PATH = "tests/components/" + +# Tuple of component and test paths for efficient startswith checks +COMPONENT_AND_TESTS_PATHS = (ESPHOME_COMPONENTS_PATH, ESPHOME_TESTS_COMPONENTS_PATH) + # Base bus components - these ARE the bus implementations and should not # be flagged as needing migration since they are the platform/base components BASE_BUS_COMPONENTS = { @@ -658,17 +667,32 @@ def get_components_from_integration_fixtures() -> set[str]: return components -def 
filter_component_files(file_path: str) -> bool: - """Check if a file path is a component file. +def filter_component_and_test_files(file_path: str) -> bool: + """Check if a file path is a component or test file. Args: file_path: Path to check Returns: - True if the file is in a component directory + True if the file is in a component or test directory """ - return file_path.startswith("esphome/components/") or file_path.startswith( - "tests/components/" + return file_path.startswith(COMPONENT_AND_TESTS_PATHS) or ( + file_path.startswith(ESPHOME_TESTS_COMPONENTS_PATH) + and file_path.endswith(YAML_FILE_EXTENSIONS) + ) + + +def filter_component_and_test_cpp_files(file_path: str) -> bool: + """Check if a file is a C++ source file in component or test directories. + + Args: + file_path: Path to check + + Returns: + True if the file is a C++ source/header file in component or test directories + """ + return file_path.endswith(CPP_FILE_EXTENSIONS) and file_path.startswith( + COMPONENT_AND_TESTS_PATHS ) @@ -740,7 +764,7 @@ def create_components_graph() -> dict[str, list[str]]: # The root directory of the repo root = Path(__file__).parent.parent - components_dir = root / "esphome" / "components" + components_dir = root / ESPHOME_COMPONENTS_PATH # Fake some directory so that get_component works CORE.config_path = root # Various configuration to capture different outcomes used by `AUTO_LOAD` function. @@ -873,3 +897,81 @@ def get_components_with_dependencies( return sorted(all_changed_components) return sorted(components) + + +def get_all_component_files() -> list[str]: + """Get all component and test files from git. + + Returns: + List of all component and test file paths + """ + files = git_ls_files() + return list(filter(filter_component_and_test_files, files)) + + +def get_all_components() -> list[str]: + """Get all component names. + + This function uses git to find all component files and extracts the component names. + It returns the same list as calling list-components.py without arguments. + + Returns: + List of all component names + """ + return get_components_with_dependencies(get_all_component_files(), False) + + +def core_changed(files: list[str]) -> bool: + """Check if any core C++ or Python files have changed. + + Args: + files: List of file paths to check + + Returns: + True if any core C++ or Python files have changed + """ + return any( + f.startswith("esphome/core/") and f.endswith(CPP_AND_PYTHON_FILE_EXTENSIONS) + for f in files + ) + + +def get_cpp_changed_components(files: list[str]) -> list[str]: + """Get components that have changed C++ files or tests. + + This function analyzes a list of changed files and determines which components + are affected. It handles two scenarios: + + 1. Test files changed (tests/components//*.cpp): + - Adds the component to the affected list + - Only that component needs to be tested + + 2. 
Component C++ files changed (esphome/components//*): + - Adds the component to the affected list + - Also adds all components that depend on this component (recursively) + - This ensures that changes propagate to dependent components + + Args: + files: List of file paths to analyze (should be C++ files) + + Returns: + Sorted list of component names that need C++ unit tests run + """ + components_graph = create_components_graph() + affected: set[str] = set() + for file in files: + if not file.endswith(CPP_FILE_EXTENSIONS): + continue + if file.startswith(ESPHOME_TESTS_COMPONENTS_PATH): + parts = file.split("/") + if len(parts) >= 4: + component_dir = Path(ESPHOME_TESTS_COMPONENTS_PATH) / parts[2] + if component_dir.is_dir(): + affected.add(parts[2]) + elif file.startswith(ESPHOME_COMPONENTS_PATH): + parts = file.split("/") + if len(parts) >= 4: + component = parts[2] + affected.update(find_children_of_component(components_graph, component)) + affected.add(component) + return sorted(affected) diff --git a/script/list-components.py b/script/list-components.py index d768256c71..31a1609f88 100755 --- a/script/list-components.py +++ b/script/list-components.py @@ -3,18 +3,14 @@ import argparse from helpers import ( changed_files, - filter_component_files, + filter_component_and_test_cpp_files, + filter_component_and_test_files, + get_all_component_files, get_components_with_dependencies, - git_ls_files, + get_cpp_changed_components, ) -def get_all_component_files() -> list[str]: - """Get all component files from git.""" - files = git_ls_files() - return list(filter(filter_component_files, files)) - - def main(): parser = argparse.ArgumentParser() parser.add_argument( @@ -39,16 +35,29 @@ def main(): parser.add_argument( "-b", "--branch", help="Branch to compare changed files against" ) + parser.add_argument( + "--cpp-changed", + action="store_true", + help="List components with changed C++ files", + ) args = parser.parse_args() if args.branch and not ( - args.changed or args.changed_direct or args.changed_with_deps + args.changed + or args.changed_direct + or args.changed_with_deps + or args.cpp_changed ): parser.error( - "--branch requires --changed, --changed-direct, or --changed-with-deps" + "--branch requires --changed, --changed-direct, --changed-with-deps, or --cpp-changed" ) - if args.changed or args.changed_direct or args.changed_with_deps: + if ( + args.changed + or args.changed_direct + or args.changed_with_deps + or args.cpp_changed + ): # When --changed* is passed, only get the changed files changed = changed_files(args.branch) @@ -68,6 +77,11 @@ def main(): # - --changed-with-deps: Used by CI test determination (script/determine-jobs.py) # Returns: Components with code changes + their dependencies (not infrastructure) # Reason: CI needs to test changed components and their dependents + # + # - --cpp-changed: Used by CI to determine if any C++ files changed (script/determine-jobs.py) + # Returns: Only components with changed C++ files + # Reason: Only components with C++ changes need C++ testing + base_test_changed = any( "tests/test_build_components" in file for file in changed ) @@ -80,7 +94,7 @@ def main(): # Only look at changed component files (ignore infrastructure changes) # For --changed-direct: only actual component code changes matter (for isolation) # For --changed-with-deps: only actual component code changes matter (for testing) - files = [f for f in changed if filter_component_files(f)] + files = [f for f in changed if filter_component_and_test_files(f)] else: # Get all 
component files files = get_all_component_files() @@ -100,6 +114,11 @@ def main(): # Return only directly changed components (without dependencies) for c in get_components_with_dependencies(files, False): print(c) + elif args.cpp_changed: + # Only look at changed cpp files + files = list(filter(filter_component_and_test_cpp_files, changed)) + for c in get_cpp_changed_components(files): + print(c) else: # Return all changed components (with dependencies) - default behavior for c in get_components_with_dependencies(files, args.changed): diff --git a/tests/components/.gitignore b/tests/components/.gitignore new file mode 100644 index 0000000000..d8b4157aef --- /dev/null +++ b/tests/components/.gitignore @@ -0,0 +1,5 @@ +# Gitignore settings for ESPHome +# This is an example and may include too much for your use-case. +# You can modify this file to suit your needs. +/.esphome/ +/secrets.yaml diff --git a/tests/components/README.md b/tests/components/README.md new file mode 100644 index 0000000000..0901f2ef17 --- /dev/null +++ b/tests/components/README.md @@ -0,0 +1,32 @@ +# How to write C++ ESPHome unit tests + +1. Locate the folder with your component or create a new one with the same name as the component. +2. Write the tests. You can add as many `.cpp` and `.h` files as you need to organize your tests. + +**IMPORTANT**: wrap all your testing code in a unique namespace to avoid linker collisions when compiling +testing binaries that combine many components. By convention, this unique namespace is `esphome::component::testing` +(where "component" is the component under test), for example: `esphome::uart::testing`. + + +## Running component unit tests + +(from the repository root) +```bash +./script/cpp_unit_test.py component1 component2 ... +``` + +The above will compile and run the provided components and their tests. + +To run all tests, you can invoke `cpp_unit_test.py` with the special `--all` flag: + +```bash +./script/cpp_unit_test.py --all +``` + +To run a specific test suite, you can provide a Google Test filter: + +```bash +GTEST_FILTER='UART*' ./script/cpp_unit_test.py uart modbus +``` + +The process will return `0` for success or nonzero for failure. In case of failure, the errors will be printed out to the console. diff --git a/tests/components/main.cpp b/tests/components/main.cpp new file mode 100644 index 0000000000..928f0e6059 --- /dev/null +++ b/tests/components/main.cpp @@ -0,0 +1,26 @@ +#include + +/* +This special main.cpp replaces the default one. +It will run all the Google Tests found in all compiled cpp files and then exit with the result +See README.md for more information +*/ + +// Auto generated code by esphome +// ========== AUTO GENERATED INCLUDE BLOCK BEGIN =========== +// ========== AUTO GENERATED INCLUDE BLOCK END ===========" + +void original_setup() { + // This function won't be run. 
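+ // It only exists to hold the auto-generated code block below so the generated
+ // output still compiles; nothing calls it — the setup() further down runs all
+ // Google Tests and exits instead.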
+ + // ========== AUTO GENERATED CODE BEGIN =========== + // =========== AUTO GENERATED CODE END ============ +} + +void setup() { + ::testing::InitGoogleTest(); + int exit_code = RUN_ALL_TESTS(); + exit(exit_code); +} + +void loop() {} diff --git a/tests/components/uart/common.h b/tests/components/uart/common.h new file mode 100644 index 0000000000..5597b86410 --- /dev/null +++ b/tests/components/uart/common.h @@ -0,0 +1,37 @@ +#pragma once +#include +#include +#include +#include +#include +#include "esphome/components/uart/uart_component.h" + +namespace esphome::uart::testing { + +using ::testing::_; +using ::testing::Return; +using ::testing::SaveArg; +using ::testing::DoAll; +using ::testing::Invoke; +using ::testing::SetArgPointee; + +// Derive a mock from UARTComponent to test the wrapper implementations. +class MockUARTComponent : public UARTComponent { + public: + using UARTComponent::write_array; + using UARTComponent::write_byte; + + // NOTE: std::vector is used here for test convenience. For production code, + // consider using StaticVector or FixedVector from esphome/core/helpers.h instead. + std::vector written_data; + + void write_array(const uint8_t *data, size_t len) override { written_data.assign(data, data + len); } + + MOCK_METHOD(bool, read_array, (uint8_t * data, size_t len), (override)); + MOCK_METHOD(bool, peek_byte, (uint8_t * data), (override)); + MOCK_METHOD(int, available, (), (override)); + MOCK_METHOD(void, flush, (), (override)); + MOCK_METHOD(void, check_logger_conflict, (), (override)); +}; + +} // namespace esphome::uart::testing diff --git a/tests/components/uart/uart_component.cpp b/tests/components/uart/uart_component.cpp new file mode 100644 index 0000000000..2cab1f62ad --- /dev/null +++ b/tests/components/uart/uart_component.cpp @@ -0,0 +1,73 @@ +#include "common.h" + +namespace esphome::uart::testing { + +TEST(UARTComponentTest, SetGetBaudRate) { + MockUARTComponent mock; + mock.set_baud_rate(38400); + EXPECT_EQ(mock.get_baud_rate(), 38400); +} + +TEST(UARTComponentTest, SetGetStopBits) { + MockUARTComponent mock; + mock.set_stop_bits(2); + EXPECT_EQ(mock.get_stop_bits(), 2); +} + +TEST(UARTComponentTest, SetGetDataBits) { + MockUARTComponent mock; + mock.set_data_bits(7); + EXPECT_EQ(mock.get_data_bits(), 7); +} + +TEST(UARTComponentTest, SetGetParity) { + MockUARTComponent mock; + mock.set_parity(UARTParityOptions::UART_CONFIG_PARITY_EVEN); + EXPECT_EQ(mock.get_parity(), UARTParityOptions::UART_CONFIG_PARITY_EVEN); +} + +TEST(UARTComponentTest, SetGetRxBufferSize) { + MockUARTComponent mock; + mock.set_rx_buffer_size(128); + EXPECT_EQ(mock.get_rx_buffer_size(), 128); +} + +TEST(UARTComponentTest, WriteArrayVector) { + MockUARTComponent mock; + std::vector data = {10, 20, 30}; + mock.write_array(data); + EXPECT_EQ(mock.written_data, data); +} +TEST(UARTComponentTest, WriteByte) { + MockUARTComponent mock; + uint8_t byte = 0x79; + mock.write_byte(byte); + EXPECT_EQ(mock.written_data.size(), 1); + EXPECT_EQ(mock.written_data[0], byte); +} + +TEST(UARTComponentTest, WriteStr) { + MockUARTComponent mock; + const char *str = "Hello"; + std::vector captured; + mock.write_str(str); + EXPECT_EQ(mock.written_data.size(), strlen(str)); + EXPECT_EQ(0, strncmp(str, (const char *) mock.written_data.data(), mock.written_data.size())); +} + +// Tests for wrapper methods forwarding to pure virtual read_array +TEST(UARTComponentTest, ReadByteSuccess) { + MockUARTComponent mock; + uint8_t value = 0; + EXPECT_CALL(mock, read_array(&value, 1)).WillOnce(Return(true)); + 
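+ // read_byte() is one of the wrapper methods under test: it should forward to the
+ // mocked pure-virtual read_array() exactly once, satisfying the expectation above.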
EXPECT_TRUE(mock.read_byte(&value)); +} + +TEST(UARTComponentTest, ReadByteFailure) { + MockUARTComponent mock; + uint8_t value = 0xFF; + EXPECT_CALL(mock, read_array(&value, 1)).WillOnce(Return(false)); + EXPECT_FALSE(mock.read_byte(&value)); +} + +} // namespace esphome::uart::testing diff --git a/tests/components/uart/uart_device.cpp b/tests/components/uart/uart_device.cpp new file mode 100644 index 0000000000..c3f1d9078b --- /dev/null +++ b/tests/components/uart/uart_device.cpp @@ -0,0 +1,108 @@ +#include "common.h" +#include "esphome/components/uart/uart.h" + +namespace esphome::uart::testing { + +TEST(UARTDeviceTest, ReadByteSuccess) { + MockUARTComponent mock; + UARTDevice dev(&mock); + uint8_t value = 0; + EXPECT_CALL(mock, read_array(_, 1)).WillOnce(DoAll(SetArgPointee<0>(0x5A), Return(true))); + bool result = dev.read_byte(&value); + EXPECT_TRUE(result); + EXPECT_EQ(value, 0x5A); +} + +TEST(UARTDeviceTest, ReadByteFailure) { + MockUARTComponent mock; + UARTDevice dev(&mock); + uint8_t value = 0xFF; + EXPECT_CALL(mock, read_array(_, 1)).WillOnce(Return(false)); + bool result = dev.read_byte(&value); + EXPECT_FALSE(result); +} + +TEST(UARTDeviceTest, PeekByteSuccess) { + MockUARTComponent mock; + UARTDevice dev(&mock); + uint8_t value = 0; + EXPECT_CALL(mock, peek_byte(_)).WillOnce(DoAll(SetArgPointee<0>(0xA5), Return(true))); + bool result = dev.peek_byte(&value); + EXPECT_TRUE(result); + EXPECT_EQ(value, 0xA5); +} + +TEST(UARTDeviceTest, PeekByteFailure) { + MockUARTComponent mock; + UARTDevice dev(&mock); + uint8_t value = 0; + EXPECT_CALL(mock, peek_byte(_)).WillOnce(Return(false)); + bool result = dev.peek_byte(&value); + EXPECT_FALSE(result); +} + +TEST(UARTDeviceTest, Available) { + MockUARTComponent mock; + UARTDevice dev(&mock); + EXPECT_CALL(mock, available()).WillOnce(Return(5)); + EXPECT_EQ(dev.available(), 5); +} + +TEST(UARTDeviceTest, FlushCallsParent) { + MockUARTComponent mock; + UARTDevice dev(&mock); + EXPECT_CALL(mock, flush()).Times(1); + dev.flush(); +} + +TEST(UARTDeviceTest, WriteByteForwardsToWriteArray) { + MockUARTComponent mock; + UARTDevice dev(&mock); + dev.write_byte(0xAB); + EXPECT_EQ(mock.written_data.size(), 1); + EXPECT_EQ(mock.written_data[0], 0xAB); +} +TEST(UARTDeviceTest, WriteArrayPointer) { + MockUARTComponent mock; + UARTDevice dev(&mock); + uint8_t data[3] = {1, 2, 3}; + dev.write_array(data, 3); + EXPECT_EQ(mock.written_data.size(), 3); + EXPECT_EQ(mock.written_data, std::vector(data, data + 3)); +} + +TEST(UARTDeviceTest, WriteArrayVector) { + MockUARTComponent mock; + UARTDevice dev(&mock); + std::vector data = {4, 5, 6}; + dev.write_array(data); + EXPECT_EQ(mock.written_data, data); +} + +TEST(UARTDeviceTest, WriteArrayStdArray) { + MockUARTComponent mock; + UARTDevice dev(&mock); + std::array data = {7, 8, 9, 10}; + dev.write_array(data); + EXPECT_EQ(mock.written_data.size(), data.size()); + EXPECT_EQ(mock.written_data, std::vector(data.begin(), data.end())); +} + +TEST(UARTDeviceTest, WriteStrForwardsToWriteArray) { + MockUARTComponent mock; + UARTDevice dev(&mock); + const char *str = "ESPHome"; + dev.write_str(str); + EXPECT_EQ(mock.written_data.size(), strlen(str)); + EXPECT_EQ(0, strncmp(str, (const char *) mock.written_data.data(), mock.written_data.size())); +} + +TEST(UARTDeviceTest, WriteStrEmptyString) { + MockUARTComponent mock; + UARTDevice dev(&mock); + const char *str = ""; + dev.write_str(str); + EXPECT_EQ(mock.written_data.size(), 0); +} + +} // namespace esphome::uart::testing diff --git 
a/tests/script/test_determine_jobs.py b/tests/script/test_determine_jobs.py index 35652e0efc..a859b3c24d 100644 --- a/tests/script/test_determine_jobs.py +++ b/tests/script/test_determine_jobs.py @@ -5,7 +5,6 @@ import importlib.util import json import os from pathlib import Path -import subprocess import sys from unittest.mock import Mock, call, patch @@ -56,9 +55,9 @@ def mock_should_run_python_linters() -> Generator[Mock, None, None]: @pytest.fixture -def mock_subprocess_run() -> Generator[Mock, None, None]: - """Mock subprocess.run for list-components.py calls.""" - with patch.object(determine_jobs.subprocess, "run") as mock: +def mock_determine_cpp_unit_tests() -> Generator[Mock, None, None]: + """Mock determine_cpp_unit_tests from helpers.""" + with patch.object(determine_jobs, "determine_cpp_unit_tests") as mock: yield mock @@ -82,8 +81,8 @@ def test_main_all_tests_should_run( mock_should_run_clang_tidy: Mock, mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, - mock_subprocess_run: Mock, mock_changed_files: Mock, + mock_determine_cpp_unit_tests: Mock, capsys: pytest.CaptureFixture[str], monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -95,6 +94,7 @@ def test_main_all_tests_should_run( mock_should_run_clang_tidy.return_value = True mock_should_run_clang_format.return_value = True mock_should_run_python_linters.return_value = True + mock_determine_cpp_unit_tests.return_value = (False, ["wifi", "api", "sensor"]) # Mock changed_files to return non-component files (to avoid memory impact) # Memory impact only runs when component C++ files change @@ -114,15 +114,15 @@ def test_main_all_tests_should_run( ), patch.object( determine_jobs, - "filter_component_files", + "filter_component_and_test_files", side_effect=lambda f: f.startswith("esphome/components/"), ), patch.object( determine_jobs, "get_components_with_dependencies", - side_effect=lambda files, deps: ["wifi", "api"] - if not deps - else ["wifi", "api", "sensor"], + side_effect=lambda files, deps: ( + ["wifi", "api"] if not deps else ["wifi", "api", "sensor"] + ), ), ): determine_jobs.main() @@ -150,6 +150,8 @@ def test_main_all_tests_should_run( # memory_impact should be false (no component C++ files changed) assert "memory_impact" in output assert output["memory_impact"]["should_run"] == "false" + assert output["cpp_unit_tests_run_all"] is False + assert output["cpp_unit_tests_components"] == ["wifi", "api", "sensor"] def test_main_no_tests_should_run( @@ -157,8 +159,8 @@ def test_main_no_tests_should_run( mock_should_run_clang_tidy: Mock, mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, - mock_subprocess_run: Mock, mock_changed_files: Mock, + mock_determine_cpp_unit_tests: Mock, capsys: pytest.CaptureFixture[str], monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -170,6 +172,7 @@ def test_main_no_tests_should_run( mock_should_run_clang_tidy.return_value = False mock_should_run_clang_format.return_value = False mock_should_run_python_linters.return_value = False + mock_determine_cpp_unit_tests.return_value = (False, []) # Mock changed_files to return no component files mock_changed_files.return_value = [] @@ -178,7 +181,9 @@ def test_main_no_tests_should_run( with ( patch("sys.argv", ["determine-jobs.py"]), patch.object(determine_jobs, "get_changed_components", return_value=[]), - patch.object(determine_jobs, "filter_component_files", return_value=False), + patch.object( + determine_jobs, "filter_component_and_test_files", return_value=False + ), patch.object( determine_jobs, 
"get_components_with_dependencies", return_value=[] ), @@ -202,31 +207,8 @@ def test_main_no_tests_should_run( # memory_impact should be present assert "memory_impact" in output assert output["memory_impact"]["should_run"] == "false" - - -def test_main_list_components_fails( - mock_should_run_integration_tests: Mock, - mock_should_run_clang_tidy: Mock, - mock_should_run_clang_format: Mock, - mock_should_run_python_linters: Mock, - mock_subprocess_run: Mock, - capsys: pytest.CaptureFixture[str], -) -> None: - """Test when list-components.py fails.""" - mock_should_run_integration_tests.return_value = True - mock_should_run_clang_tidy.return_value = True - mock_should_run_clang_format.return_value = True - mock_should_run_python_linters.return_value = True - - # Mock list-components.py failure - mock_subprocess_run.side_effect = subprocess.CalledProcessError(1, "cmd") - - # Run main function with mocked argv - should raise - with ( - patch("sys.argv", ["determine-jobs.py"]), - pytest.raises(subprocess.CalledProcessError), - ): - determine_jobs.main() + assert output["cpp_unit_tests_run_all"] is False + assert output["cpp_unit_tests_components"] == [] def test_main_with_branch_argument( @@ -234,8 +216,8 @@ def test_main_with_branch_argument( mock_should_run_clang_tidy: Mock, mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, - mock_subprocess_run: Mock, mock_changed_files: Mock, + mock_determine_cpp_unit_tests: Mock, capsys: pytest.CaptureFixture[str], monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -247,6 +229,7 @@ def test_main_with_branch_argument( mock_should_run_clang_tidy.return_value = True mock_should_run_clang_format.return_value = False mock_should_run_python_linters.return_value = True + mock_determine_cpp_unit_tests.return_value = (False, ["mqtt"]) # Mock changed_files to return non-component files (to avoid memory impact) # Memory impact only runs when component C++ files change @@ -258,7 +241,7 @@ def test_main_with_branch_argument( patch.object(determine_jobs, "get_changed_components", return_value=["mqtt"]), patch.object( determine_jobs, - "filter_component_files", + "filter_component_and_test_files", side_effect=lambda f: f.startswith("esphome/components/"), ), patch.object( @@ -296,6 +279,8 @@ def test_main_with_branch_argument( # memory_impact should be false (no component C++ files changed) assert "memory_impact" in output assert output["memory_impact"]["should_run"] == "false" + assert output["cpp_unit_tests_run_all"] is False + assert output["cpp_unit_tests_components"] == ["mqtt"] def test_should_run_integration_tests( @@ -506,7 +491,6 @@ def test_main_filters_components_without_tests( mock_should_run_clang_tidy: Mock, mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, - mock_subprocess_run: Mock, mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], tmp_path: Path, @@ -556,16 +540,17 @@ def test_main_filters_components_without_tests( ), patch.object( determine_jobs, - "filter_component_files", + "filter_component_and_test_files", side_effect=lambda f: f.startswith("esphome/components/"), ), patch.object( determine_jobs, "get_components_with_dependencies", - side_effect=lambda files, deps: ["wifi", "sensor"] - if not deps - else ["wifi", "sensor", "airthings_ble"], + side_effect=lambda files, deps: ( + ["wifi", "sensor"] if not deps else ["wifi", "sensor", "airthings_ble"] + ), ), + patch.object(determine_jobs, "changed_files", return_value=[]), ): # Clear the cache since we're mocking root_path 
determine_jobs._component_has_tests.cache_clear() @@ -808,7 +793,6 @@ def test_clang_tidy_mode_full_scan( mock_should_run_clang_tidy: Mock, mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, - mock_subprocess_run: Mock, mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], monkeypatch: pytest.MonkeyPatch, @@ -829,7 +813,9 @@ def test_clang_tidy_mode_full_scan( patch("sys.argv", ["determine-jobs.py"]), patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=True), patch.object(determine_jobs, "get_changed_components", return_value=[]), - patch.object(determine_jobs, "filter_component_files", return_value=False), + patch.object( + determine_jobs, "filter_component_and_test_files", return_value=False + ), patch.object( determine_jobs, "get_components_with_dependencies", return_value=[] ), @@ -873,7 +859,6 @@ def test_clang_tidy_mode_targeted_scan( mock_should_run_clang_tidy: Mock, mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, - mock_subprocess_run: Mock, mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], monkeypatch: pytest.MonkeyPatch, @@ -912,7 +897,7 @@ def test_clang_tidy_mode_targeted_scan( patch.object(determine_jobs, "get_changed_components", return_value=components), patch.object( determine_jobs, - "filter_component_files", + "filter_component_and_test_files", side_effect=lambda f: f.startswith("esphome/components/"), ), patch.object( From 9c712744bea9c495bfc7c62275c4f8c03cf9493a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 12:40:19 -1000 Subject: [PATCH 271/336] [light] Replace std::vector with FixedVector in strobe and color_wipe effects (#11455) --- esphome/components/light/addressable_light_effect.h | 6 +++--- esphome/components/light/base_light_effects.h | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/esphome/components/light/addressable_light_effect.h b/esphome/components/light/addressable_light_effect.h index fcf76b3cb0..9caccad634 100644 --- a/esphome/components/light/addressable_light_effect.h +++ b/esphome/components/light/addressable_light_effect.h @@ -1,9 +1,9 @@ #pragma once #include -#include #include "esphome/core/component.h" +#include "esphome/core/helpers.h" #include "esphome/components/light/light_state.h" #include "esphome/components/light/addressable_light.h" @@ -113,7 +113,7 @@ struct AddressableColorWipeEffectColor { class AddressableColorWipeEffect : public AddressableLightEffect { public: explicit AddressableColorWipeEffect(const std::string &name) : AddressableLightEffect(name) {} - void set_colors(const std::vector &colors) { this->colors_ = colors; } + void set_colors(const std::initializer_list &colors) { this->colors_ = colors; } void set_add_led_interval(uint32_t add_led_interval) { this->add_led_interval_ = add_led_interval; } void set_reverse(bool reverse) { this->reverse_ = reverse; } void apply(AddressableLight &it, const Color ¤t_color) override { @@ -155,7 +155,7 @@ class AddressableColorWipeEffect : public AddressableLightEffect { } protected: - std::vector colors_; + FixedVector colors_; size_t at_color_{0}; uint32_t last_add_{0}; uint32_t add_led_interval_{}; diff --git a/esphome/components/light/base_light_effects.h b/esphome/components/light/base_light_effects.h index ff6cd1ccfe..c74d19fe14 100644 --- a/esphome/components/light/base_light_effects.h +++ b/esphome/components/light/base_light_effects.h @@ -1,9 +1,9 @@ #pragma once #include -#include #include "esphome/core/automation.h" +#include 
"esphome/core/helpers.h" #include "light_effect.h" namespace esphome { @@ -188,10 +188,10 @@ class StrobeLightEffect : public LightEffect { this->last_switch_ = now; } - void set_colors(const std::vector &colors) { this->colors_ = colors; } + void set_colors(const std::initializer_list &colors) { this->colors_ = colors; } protected: - std::vector colors_; + FixedVector colors_; uint32_t last_switch_{0}; size_t at_color_{0}; }; From f9fe2d21e54c5cbb8fe0260b6ba4ad30abb19b7b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 13:25:51 -1000 Subject: [PATCH 272/336] tweaks --- esphome/components/wifi/__init__.py | 15 ++++++++------- esphome/components/wifi/wifi_component.cpp | 9 +++++++-- esphome/components/wifi/wifi_component.h | 3 ++- 3 files changed, 17 insertions(+), 10 deletions(-) diff --git a/esphome/components/wifi/__init__.py b/esphome/components/wifi/__init__.py index 76155763fb..c7632a0c6b 100644 --- a/esphome/components/wifi/__init__.py +++ b/esphome/components/wifi/__init__.py @@ -378,18 +378,19 @@ async def to_code(config): # Track if any network uses Enterprise authentication has_eap = False - # Build all WiFiAP objects + # Initialize FixedVector with the count of networks networks = config.get(CONF_NETWORKS, []) if networks: - wifi_aps = [] + cg.add(var.init_sta(len(networks))) + + def add_sta(ap, network): + ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP)) + cg.add(var.add_sta(wifi_network(network, ap, ip_config))) + for network in networks: if CONF_EAP in network: has_eap = True - ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP)) - wifi_aps.append(wifi_network(network, WiFiAP(), ip_config)) - - # Set all WiFi networks at once - cg.add(var.set_stas(wifi_aps)) + cg.with_local_variable(network[CONF_ID], WiFiAP(), add_sta, network) if CONF_AP in config: conf = config[CONF_AP] diff --git a/esphome/components/wifi/wifi_component.cpp b/esphome/components/wifi/wifi_component.cpp index a7b66114c8..b278e5a386 100644 --- a/esphome/components/wifi/wifi_component.cpp +++ b/esphome/components/wifi/wifi_component.cpp @@ -330,8 +330,13 @@ float WiFiComponent::get_loop_priority() const { return 10.0f; // before other loop components } -void WiFiComponent::set_stas(const std::initializer_list &aps) { this->sta_ = aps; } -void WiFiComponent::set_sta(const WiFiAP &ap) { this->set_stas({ap}); } +void WiFiComponent::init_sta(size_t count) { this->sta_.init(count); } +void WiFiComponent::add_sta(const WiFiAP &ap) { this->sta_.push_back(ap); } +void WiFiComponent::set_sta(const WiFiAP &ap) { + this->clear_sta(); + this->init_sta(1); + this->add_sta(ap); +} void WiFiComponent::clear_sta() { this->sta_.clear(); } void WiFiComponent::save_wifi_sta(const std::string &ssid, const std::string &password) { SavedWifiSettings save{}; // zero-initialized - all bytes set to \0, guaranteeing null termination diff --git a/esphome/components/wifi/wifi_component.h b/esphome/components/wifi/wifi_component.h index 0bcfd7445a..42f78dbfac 100644 --- a/esphome/components/wifi/wifi_component.h +++ b/esphome/components/wifi/wifi_component.h @@ -219,7 +219,8 @@ class WiFiComponent : public Component { void set_sta(const WiFiAP &ap); WiFiAP get_sta() { return this->selected_ap_; } - void set_stas(const std::initializer_list &aps); + void init_sta(size_t count); + void add_sta(const WiFiAP &ap); void clear_sta(); #ifdef USE_WIFI_AP From 35f3c6b098c4798155544abb47bcc71ffe1faf3b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 21 Oct 2025 13:44:46 -1000 Subject: [PATCH 273/336] preen --- esphome/components/wifi/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/wifi/__init__.py b/esphome/components/wifi/__init__.py index c7632a0c6b..29d33bfc76 100644 --- a/esphome/components/wifi/__init__.py +++ b/esphome/components/wifi/__init__.py @@ -383,7 +383,7 @@ async def to_code(config): if networks: cg.add(var.init_sta(len(networks))) - def add_sta(ap, network): + def add_sta(ap: cg.MockObj, network: dict) -> None: ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP)) cg.add(var.add_sta(wifi_network(network, ap, ip_config))) From ece0619070dc03e77776e61970de27bc35740c86 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 14:05:43 -1000 Subject: [PATCH 274/336] [event] Replace std::set with FixedVector for event type storage --- esphome/components/event/event.cpp | 13 ++++++++++--- esphome/components/event/event.h | 7 +++---- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/esphome/components/event/event.cpp b/esphome/components/event/event.cpp index d27b3b378e..20549ad0a5 100644 --- a/esphome/components/event/event.cpp +++ b/esphome/components/event/event.cpp @@ -8,12 +8,19 @@ namespace event { static const char *const TAG = "event"; void Event::trigger(const std::string &event_type) { - auto found = types_.find(event_type); - if (found == types_.end()) { + // Linear search - faster than std::set for small datasets (1-5 items typical) + const std::string *found = nullptr; + for (const auto &type : this->types_) { + if (type == event_type) { + found = &type; + break; + } + } + if (found == nullptr) { ESP_LOGE(TAG, "'%s': invalid event type for trigger(): %s", this->get_name().c_str(), event_type.c_str()); return; } - last_event_type = &(*found); + last_event_type = found; ESP_LOGD(TAG, "'%s' Triggered event '%s'", this->get_name().c_str(), last_event_type->c_str()); this->event_callback_.call(event_type); } diff --git a/esphome/components/event/event.h b/esphome/components/event/event.h index a90c8ebe05..2f6267a200 100644 --- a/esphome/components/event/event.h +++ b/esphome/components/event/event.h @@ -1,6 +1,5 @@ #pragma once -#include #include #include "esphome/core/component.h" @@ -26,13 +25,13 @@ class Event : public EntityBase, public EntityBase_DeviceClass { const std::string *last_event_type; void trigger(const std::string &event_type); - void set_event_types(const std::set &event_types) { this->types_ = event_types; } - std::set get_event_types() const { return this->types_; } + void set_event_types(const std::initializer_list &event_types) { this->types_ = event_types; } + const FixedVector &get_event_types() const { return this->types_; } void add_on_event_callback(std::function &&callback); protected: CallbackManager event_callback_; - std::set types_; + FixedVector types_; }; } // namespace event From c6ae1a5909c0a0f2ec87c479da43a5602109eeb5 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Wed, 22 Oct 2025 14:00:27 +1300 Subject: [PATCH 275/336] [core] Stop clang-format "fixing" a single line (#11462) --- esphome/core/defines.h | 2 ++ 1 file changed, 2 insertions(+) diff --git a/esphome/core/defines.h b/esphome/core/defines.h index 4e9fb078a0..39698c1004 100644 --- a/esphome/core/defines.h +++ b/esphome/core/defines.h @@ -243,8 +243,10 @@ // Dummy firmware payload for shelly_dimmer #define USE_SHD_FIRMWARE_MAJOR_VERSION 56 #define 
USE_SHD_FIRMWARE_MINOR_VERSION 5 +// clang-format off #define USE_SHD_FIRMWARE_DATA \ {} +// clang-format on #define USE_WEBSERVER #define USE_WEBSERVER_AUTH From 2c1927fd123ed96f1cbab482ef533e81d5b4d402 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Wed, 22 Oct 2025 14:24:56 +1300 Subject: [PATCH 276/336] [api] Allow clearing noise psk if dynamically set (#11429) --- esphome/components/api/api_connection.cpp | 8 ++- esphome/components/api/api_server.cpp | 61 ++++++++++++------- esphome/components/api/api_server.h | 5 ++ ...noise_encryption_key_clear_protection.yaml | 10 +++ .../test_noise_encryption_key_protection.py | 39 ++++++++++++ 5 files changed, 101 insertions(+), 22 deletions(-) create mode 100644 tests/integration/fixtures/noise_encryption_key_clear_protection.yaml diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index 6334815678..7c135946f8 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -1572,7 +1572,13 @@ bool APIConnection::send_noise_encryption_set_key_response(const NoiseEncryption resp.success = false; psk_t psk{}; - if (base64_decode(msg.key, psk.data(), msg.key.size()) != psk.size()) { + if (msg.key.empty()) { + if (this->parent_->clear_noise_psk(true)) { + resp.success = true; + } else { + ESP_LOGW(TAG, "Failed to clear encryption key"); + } + } else if (base64_decode(msg.key, psk.data(), msg.key.size()) != psk.size()) { ESP_LOGW(TAG, "Invalid encryption key length"); } else if (!this->parent_->save_noise_psk(psk, true)) { ESP_LOGW(TAG, "Failed to save encryption key"); diff --git a/esphome/components/api/api_server.cpp b/esphome/components/api/api_server.cpp index 778d9389ef..e618610a75 100644 --- a/esphome/components/api/api_server.cpp +++ b/esphome/components/api/api_server.cpp @@ -468,6 +468,31 @@ uint16_t APIServer::get_port() const { return this->port_; } void APIServer::set_reboot_timeout(uint32_t reboot_timeout) { this->reboot_timeout_ = reboot_timeout; } #ifdef USE_API_NOISE +bool APIServer::update_noise_psk_(const SavedNoisePsk &new_psk, const LogString *save_log_msg, + const LogString *fail_log_msg, const psk_t &active_psk, bool make_active) { + if (!this->noise_pref_.save(&new_psk)) { + ESP_LOGW(TAG, "%s", LOG_STR_ARG(fail_log_msg)); + return false; + } + // ensure it's written immediately + if (!global_preferences->sync()) { + ESP_LOGW(TAG, "Failed to sync preferences"); + return false; + } + ESP_LOGD(TAG, "%s", LOG_STR_ARG(save_log_msg)); + if (make_active) { + this->set_timeout(100, [this, active_psk]() { + ESP_LOGW(TAG, "Disconnecting all clients to reset PSK"); + this->set_noise_psk(active_psk); + for (auto &c : this->clients_) { + DisconnectRequest req; + c->send_message(req, DisconnectRequest::MESSAGE_TYPE); + } + }); + } + return true; +} + bool APIServer::save_noise_psk(psk_t psk, bool make_active) { #ifdef USE_API_NOISE_PSK_FROM_YAML // When PSK is set from YAML, this function should never be called @@ -482,27 +507,21 @@ bool APIServer::save_noise_psk(psk_t psk, bool make_active) { } SavedNoisePsk new_saved_psk{psk}; - if (!this->noise_pref_.save(&new_saved_psk)) { - ESP_LOGW(TAG, "Failed to save Noise PSK"); - return false; - } - // ensure it's written immediately - if (!global_preferences->sync()) { - ESP_LOGW(TAG, "Failed to sync preferences"); - return false; - } - ESP_LOGD(TAG, "Noise PSK saved"); - if (make_active) { - this->set_timeout(100, [this, psk]() { - ESP_LOGW(TAG, "Disconnecting 
all clients to reset PSK"); - this->set_noise_psk(psk); - for (auto &c : this->clients_) { - DisconnectRequest req; - c->send_message(req, DisconnectRequest::MESSAGE_TYPE); - } - }); - } - return true; + return this->update_noise_psk_(new_saved_psk, LOG_STR("Noise PSK saved"), LOG_STR("Failed to save Noise PSK"), psk, + make_active); +#endif +} +bool APIServer::clear_noise_psk(bool make_active) { +#ifdef USE_API_NOISE_PSK_FROM_YAML + // When PSK is set from YAML, this function should never be called + // but if it is, reject the change + ESP_LOGW(TAG, "Key set in YAML"); + return false; +#else + SavedNoisePsk empty_psk{}; + psk_t empty{}; + return this->update_noise_psk_(empty_psk, LOG_STR("Noise PSK cleared"), LOG_STR("Failed to clear Noise PSK"), empty, + make_active); #endif } #endif diff --git a/esphome/components/api/api_server.h b/esphome/components/api/api_server.h index 5d038e5ddd..e0e23301d0 100644 --- a/esphome/components/api/api_server.h +++ b/esphome/components/api/api_server.h @@ -53,6 +53,7 @@ class APIServer : public Component, public Controller { #ifdef USE_API_NOISE bool save_noise_psk(psk_t psk, bool make_active = true); + bool clear_noise_psk(bool make_active = true); void set_noise_psk(psk_t psk) { noise_ctx_->set_psk(psk); } std::shared_ptr get_noise_ctx() { return noise_ctx_; } #endif // USE_API_NOISE @@ -174,6 +175,10 @@ class APIServer : public Component, public Controller { protected: void schedule_reboot_timeout_(); +#ifdef USE_API_NOISE + bool update_noise_psk_(const SavedNoisePsk &new_psk, const LogString *save_log_msg, const LogString *fail_log_msg, + const psk_t &active_psk, bool make_active); +#endif // USE_API_NOISE // Pointers and pointer-like types first (4 bytes each) std::unique_ptr socket_ = nullptr; #ifdef USE_API_CLIENT_CONNECTED_TRIGGER diff --git a/tests/integration/fixtures/noise_encryption_key_clear_protection.yaml b/tests/integration/fixtures/noise_encryption_key_clear_protection.yaml new file mode 100644 index 0000000000..3ce84cd373 --- /dev/null +++ b/tests/integration/fixtures/noise_encryption_key_clear_protection.yaml @@ -0,0 +1,10 @@ +esphome: + name: noise-key-test + +host: + +api: + encryption: + key: "zX9/JHxMKwpP0jUGsF0iESCm1wRvNgR6NkKVOhn7kSs=" + +logger: diff --git a/tests/integration/test_noise_encryption_key_protection.py b/tests/integration/test_noise_encryption_key_protection.py index 03c43ca8d3..37d32ce2b4 100644 --- a/tests/integration/test_noise_encryption_key_protection.py +++ b/tests/integration/test_noise_encryption_key_protection.py @@ -49,3 +49,42 @@ async def test_noise_encryption_key_protection( with pytest.raises(InvalidEncryptionKeyAPIError): async with api_client_connected(noise_psk=wrong_key) as client: await client.device_info() + + +@pytest.mark.asyncio +async def test_noise_encryption_key_clear_protection( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test that noise encryption key set in YAML cannot be changed via API.""" + # The key that's set in the YAML fixture + noise_psk = "zX9/JHxMKwpP0jUGsF0iESCm1wRvNgR6NkKVOhn7kSs=" + + # Keep ESPHome process running throughout all tests + async with run_compiled(yaml_config): + # First connection - test key change attempt + async with api_client_connected(noise_psk=noise_psk) as client: + # Verify connection is established + device_info = await client.device_info() + assert device_info is not None + + # Try to set a new encryption key via API + new_key = b"" # Empty key to attempt to clear + + 
# This should fail since key was set in YAML + success = await client.noise_encryption_set_key(new_key) + assert success is False + + # Reconnect with the original key to verify it still works + async with api_client_connected(noise_psk=noise_psk) as client: + # Verify connection is still successful with original key + device_info = await client.device_info() + assert device_info is not None + assert device_info.name == "noise-key-test" + + # Verify that connecting with a wrong key fails + wrong_key = base64.b64encode(b"y" * 32).decode() # Different key + with pytest.raises(InvalidEncryptionKeyAPIError): + async with api_client_connected(noise_psk=wrong_key) as client: + await client.device_info() From 78ffeb30fb9f3cb578fd17d8ed852c139696f191 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 17:55:13 -1000 Subject: [PATCH 277/336] [binary_sensor] Optimize MultiClickTrigger with FixedVector (#11453) Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com> --- esphome/components/binary_sensor/automation.h | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/esphome/components/binary_sensor/automation.h b/esphome/components/binary_sensor/automation.h index b46436dc41..0bc7b9acb3 100644 --- a/esphome/components/binary_sensor/automation.h +++ b/esphome/components/binary_sensor/automation.h @@ -2,11 +2,11 @@ #include #include -#include #include "esphome/core/component.h" #include "esphome/core/automation.h" #include "esphome/core/hal.h" +#include "esphome/core/helpers.h" #include "esphome/components/binary_sensor/binary_sensor.h" namespace esphome { @@ -92,8 +92,8 @@ class DoubleClickTrigger : public Trigger<> { class MultiClickTrigger : public Trigger<>, public Component { public: - explicit MultiClickTrigger(BinarySensor *parent, std::vector timing) - : parent_(parent), timing_(std::move(timing)) {} + explicit MultiClickTrigger(BinarySensor *parent, std::initializer_list timing) + : parent_(parent), timing_(timing) {} void setup() override { this->last_state_ = this->parent_->get_state_default(false); @@ -115,7 +115,7 @@ class MultiClickTrigger : public Trigger<>, public Component { void trigger_(); BinarySensor *parent_; - std::vector timing_; + FixedVector timing_; uint32_t invalid_cooldown_{1000}; optional at_index_{}; bool last_state_{false}; From e3aaf6a1440d066429b8b0465538f6f10b873a33 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 17:55:46 -1000 Subject: [PATCH 278/336] [wifi] Test multiple stas in wifi compile tests (#11460) --- tests/components/wifi/common.yaml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/components/wifi/common.yaml b/tests/components/wifi/common.yaml index 343d44b177..af27f85092 100644 --- a/tests/components/wifi/common.yaml +++ b/tests/components/wifi/common.yaml @@ -12,5 +12,8 @@ esphome: - logger.log: "Failed to connect to WiFi!" wifi: - ssid: MySSID - password: password1 + networks: + - ssid: MySSID + password: password1 + - ssid: MySSID2 + password: password2 From 0de79ba29144a38ba4de245990dc033cc0cddd1c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 21 Oct 2025 17:57:18 -1000 Subject: [PATCH 279/336] [event] Replace std::set with FixedVector for event type storage (#11463) --- esphome/components/event/event.cpp | 13 ++++++++++--- esphome/components/event/event.h | 7 +++---- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/esphome/components/event/event.cpp b/esphome/components/event/event.cpp index d27b3b378e..20549ad0a5 100644 --- a/esphome/components/event/event.cpp +++ b/esphome/components/event/event.cpp @@ -8,12 +8,19 @@ namespace event { static const char *const TAG = "event"; void Event::trigger(const std::string &event_type) { - auto found = types_.find(event_type); - if (found == types_.end()) { + // Linear search - faster than std::set for small datasets (1-5 items typical) + const std::string *found = nullptr; + for (const auto &type : this->types_) { + if (type == event_type) { + found = &type; + break; + } + } + if (found == nullptr) { ESP_LOGE(TAG, "'%s': invalid event type for trigger(): %s", this->get_name().c_str(), event_type.c_str()); return; } - last_event_type = &(*found); + last_event_type = found; ESP_LOGD(TAG, "'%s' Triggered event '%s'", this->get_name().c_str(), last_event_type->c_str()); this->event_callback_.call(event_type); } diff --git a/esphome/components/event/event.h b/esphome/components/event/event.h index a90c8ebe05..2f6267a200 100644 --- a/esphome/components/event/event.h +++ b/esphome/components/event/event.h @@ -1,6 +1,5 @@ #pragma once -#include #include #include "esphome/core/component.h" @@ -26,13 +25,13 @@ class Event : public EntityBase, public EntityBase_DeviceClass { const std::string *last_event_type; void trigger(const std::string &event_type); - void set_event_types(const std::set &event_types) { this->types_ = event_types; } - std::set get_event_types() const { return this->types_; } + void set_event_types(const std::initializer_list &event_types) { this->types_ = event_types; } + const FixedVector &get_event_types() const { return this->types_; } void add_on_event_callback(std::function &&callback); protected: CallbackManager event_callback_; - std::set types_; + FixedVector types_; }; } // namespace event From 5b15827009a1269df71754b850295580ad90e0eb Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 21 Oct 2025 17:58:40 -1000 Subject: [PATCH 280/336] [CI] Fix component detection when core files change in determine-jobs (#11461) --- script/determine-jobs.py | 24 ++++++------ tests/script/test_determine_jobs.py | 57 +++++++++++++++++++++++++++++ 2 files changed, 70 insertions(+), 11 deletions(-) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index 6651553ce7..7cdec959c7 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -606,21 +606,23 @@ def main() -> None: # [list]: Changed components (already includes dependencies) changed_components_result = get_changed_components() + # Always analyze component files, even if core files changed + # This is needed for component testing and memory impact analysis + changed = changed_files(args.branch) + component_files = [f for f in changed if filter_component_and_test_files(f)] + + directly_changed_components = get_components_with_dependencies( + component_files, False + ) + if changed_components_result is None: # Core files changed - will trigger full clang-tidy scan - # No specific components to test - changed_components = [] - directly_changed_components = [] + # But we still need to track changed components for testing and memory analysis + changed_components = get_components_with_dependencies(component_files, True) is_core_change = True else: - # Get both directly changed and all changed (with dependencies) - changed = changed_files(args.branch) - component_files = [f for f in changed if filter_component_and_test_files(f)] - - directly_changed_components = get_components_with_dependencies( - component_files, False - ) - changed_components = get_components_with_dependencies(component_files, True) + # Use the result from get_changed_components() which includes dependencies + changed_components = changed_components_result is_core_change = False # Filter to only components that have test files diff --git a/tests/script/test_determine_jobs.py b/tests/script/test_determine_jobs.py index a859b3c24d..6095e86ea7 100644 --- a/tests/script/test_determine_jobs.py +++ b/tests/script/test_determine_jobs.py @@ -910,3 +910,60 @@ def test_clang_tidy_mode_targeted_scan( output = json.loads(captured.out) assert output["clang_tidy_mode"] == expected_mode + + +def test_main_core_files_changed_still_detects_components( + mock_should_run_integration_tests: Mock, + mock_should_run_clang_tidy: Mock, + mock_should_run_clang_format: Mock, + mock_should_run_python_linters: Mock, + mock_changed_files: Mock, + mock_determine_cpp_unit_tests: Mock, + capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test that component changes are detected even when core files change.""" + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + + mock_should_run_integration_tests.return_value = True + mock_should_run_clang_tidy.return_value = True + mock_should_run_clang_format.return_value = True + mock_should_run_python_linters.return_value = True + mock_determine_cpp_unit_tests.return_value = (True, []) + + mock_changed_files.return_value = [ + "esphome/core/helpers.h", + "esphome/components/select/select_traits.h", + "esphome/components/select/select_traits.cpp", + "esphome/components/api/api.proto", + ] + + with ( + patch("sys.argv", ["determine-jobs.py"]), + patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False), + patch.object(determine_jobs, "get_changed_components", return_value=None), + patch.object( + determine_jobs, + "filter_component_and_test_files", + 
side_effect=lambda f: f.startswith("esphome/components/"), + ), + patch.object( + determine_jobs, + "get_components_with_dependencies", + side_effect=lambda files, deps: ( + ["select", "api"] + if not deps + else ["select", "api", "bluetooth_proxy", "logger"] + ), + ), + ): + determine_jobs.main() + + captured = capsys.readouterr() + output = json.loads(captured.out) + + assert output["clang_tidy"] is True + assert output["clang_tidy_mode"] == "split" + assert "select" in output["changed_components"] + assert "api" in output["changed_components"] + assert len(output["changed_components"]) > 0 From 146b067d629ee0401fee030815ef7eaf89fe8e06 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 17:59:39 -1000 Subject: [PATCH 281/336] [light] Add compile test for addressable lights (#11465) --- tests/components/light/common.yaml | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/components/light/common.yaml b/tests/components/light/common.yaml index f807014065..247fc19aba 100644 --- a/tests/components/light/common.yaml +++ b/tests/components/light/common.yaml @@ -17,6 +17,20 @@ esphome: relative_brightness: 5% brightness_limits: max_brightness: 90% + - light.turn_on: + id: test_addressable_transition + brightness: 50% + red: 100% + green: 0% + blue: 0% + transition_length: 500ms + - light.turn_on: + id: test_addressable_transition + brightness: 100% + red: 0% + green: 100% + blue: 0% + transition_length: 1s light: - platform: binary @@ -163,3 +177,9 @@ light: blue: 0% duration: 1s transition_length: 500ms + - platform: partition + id: test_addressable_transition + name: Addressable Transition Test + default_transition_length: 1s + segments: + - single_light_id: test_rgb_light From e1c851cab88a408d9df25cd924caffd8db263f67 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 21 Oct 2025 19:23:10 -1000 Subject: [PATCH 282/336] [wifi] Optimize WiFi network storage with FixedVector (#11458) Co-authored-by: Jesse Hills <3060199+jesserockz@users.noreply.github.com> --- esphome/components/wifi/__init__.py | 19 ++++++++++------- esphome/components/wifi/wifi_component.cpp | 2 ++ esphome/components/wifi/wifi_component.h | 3 ++- esphome/core/helpers.h | 24 ++++++++++++++++------ 4 files changed, 34 insertions(+), 14 deletions(-) diff --git a/esphome/components/wifi/__init__.py b/esphome/components/wifi/__init__.py index 494470cb48..29d33bfc76 100644 --- a/esphome/components/wifi/__init__.py +++ b/esphome/components/wifi/__init__.py @@ -378,14 +378,19 @@ async def to_code(config): # Track if any network uses Enterprise authentication has_eap = False - def add_sta(ap, network): - ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP)) - cg.add(var.add_sta(wifi_network(network, ap, ip_config))) + # Initialize FixedVector with the count of networks + networks = config.get(CONF_NETWORKS, []) + if networks: + cg.add(var.init_sta(len(networks))) - for network in config.get(CONF_NETWORKS, []): - if CONF_EAP in network: - has_eap = True - cg.with_local_variable(network[CONF_ID], WiFiAP(), add_sta, network) + def add_sta(ap: cg.MockObj, network: dict) -> None: + ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP)) + cg.add(var.add_sta(wifi_network(network, ap, ip_config))) + + for network in networks: + if CONF_EAP in network: + has_eap = True + cg.with_local_variable(network[CONF_ID], WiFiAP(), add_sta, network) if CONF_AP in config: conf = config[CONF_AP] diff --git a/esphome/components/wifi/wifi_component.cpp b/esphome/components/wifi/wifi_component.cpp index c89384d742..b278e5a386 100644 --- a/esphome/components/wifi/wifi_component.cpp +++ b/esphome/components/wifi/wifi_component.cpp @@ -330,9 +330,11 @@ float WiFiComponent::get_loop_priority() const { return 10.0f; // before other loop components } +void WiFiComponent::init_sta(size_t count) { this->sta_.init(count); } void WiFiComponent::add_sta(const WiFiAP &ap) { this->sta_.push_back(ap); } void WiFiComponent::set_sta(const WiFiAP &ap) { this->clear_sta(); + this->init_sta(1); this->add_sta(ap); } void WiFiComponent::clear_sta() { this->sta_.clear(); } diff --git a/esphome/components/wifi/wifi_component.h b/esphome/components/wifi/wifi_component.h index 10aa82a065..42f78dbfac 100644 --- a/esphome/components/wifi/wifi_component.h +++ b/esphome/components/wifi/wifi_component.h @@ -219,6 +219,7 @@ class WiFiComponent : public Component { void set_sta(const WiFiAP &ap); WiFiAP get_sta() { return this->selected_ap_; } + void init_sta(size_t count); void add_sta(const WiFiAP &ap); void clear_sta(); @@ -393,7 +394,7 @@ class WiFiComponent : public Component { #endif std::string use_address_; - std::vector sta_; + FixedVector sta_; std::vector sta_priorities_; wifi_scan_vector_t scan_result_; WiFiAP selected_ap_; diff --git a/esphome/core/helpers.h b/esphome/core/helpers.h index 234d2a7d7d..9b0591c9c5 100644 --- a/esphome/core/helpers.h +++ b/esphome/core/helpers.h @@ -194,12 +194,8 @@ template class FixedVector { size_ = 0; } - public: - FixedVector() = default; - - /// Constructor from initializer list - allocates exact size needed - /// This enables brace initialization: FixedVector v = {1, 2, 3}; - FixedVector(std::initializer_list init_list) { + // Helper to assign from initializer list (shared by constructor and assignment operator) + void 
assign_from_initializer_list_(std::initializer_list init_list) { init(init_list.size()); size_t idx = 0; for (const auto &item : init_list) { @@ -209,6 +205,13 @@ template class FixedVector { size_ = init_list.size(); } + public: + FixedVector() = default; + + /// Constructor from initializer list - allocates exact size needed + /// This enables brace initialization: FixedVector v = {1, 2, 3}; + FixedVector(std::initializer_list init_list) { assign_from_initializer_list_(init_list); } + ~FixedVector() { cleanup_(); } // Disable copy operations (avoid accidental expensive copies) @@ -234,6 +237,15 @@ template class FixedVector { return *this; } + /// Assignment from initializer list - avoids temporary and move overhead + /// This enables: FixedVector v; v = {1, 2, 3}; + FixedVector &operator=(std::initializer_list init_list) { + cleanup_(); + reset_(); + assign_from_initializer_list_(init_list); + return *this; + } + // Allocate capacity - can be called multiple times to reinit void init(size_t n) { cleanup_(); From 777e73fd041ecc67539e339fe1ad1c5d4bfe4af2 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 21:54:44 -1000 Subject: [PATCH 283/336] Extract ColorModeMask into EnumBitmask helper --- esphome/components/light/color_mode.h | 212 +++++++----------------- esphome/components/light/light_call.cpp | 2 +- esphome/components/light/light_traits.h | 2 +- esphome/core/enum_bitmask.h | 155 +++++++++++++++++ 4 files changed, 216 insertions(+), 155 deletions(-) create mode 100644 esphome/core/enum_bitmask.h diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index a26f917167..9c6a4d147b 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -1,6 +1,7 @@ #pragma once #include +#include "esphome/core/enum_bitmask.h" namespace esphome { namespace light { @@ -104,16 +105,16 @@ constexpr ColorModeHelper operator|(ColorModeHelper lhs, ColorMode rhs) { return static_cast(static_cast(lhs) | static_cast(rhs)); } -// Type alias for raw color mode bitmask values +// Type alias for raw color mode bitmask values (retained for compatibility) using color_mode_bitmask_t = uint16_t; -// Constants for ColorMode count and bit range -static constexpr int COLOR_MODE_COUNT = 10; // UNKNOWN through RGB_COLD_WARM_WHITE -static constexpr int MAX_BIT_INDEX = sizeof(color_mode_bitmask_t) * 8; // Number of bits in bitmask type +// Number of ColorMode enum values +constexpr int COLOR_MODE_BITMASK_SIZE = 10; -// Compile-time array of all ColorMode values in declaration order -// Bit positions (0-9) map directly to enum declaration order -static constexpr ColorMode COLOR_MODES[COLOR_MODE_COUNT] = { +// Shared lookup table for ColorMode bit mapping +// This array defines the canonical order of color modes (bit 0-9) +// Declared early so it can be used by constexpr functions +constexpr ColorMode COLOR_MODE_LOOKUP[COLOR_MODE_BITMASK_SIZE] = { ColorMode::UNKNOWN, // bit 0 ColorMode::ON_OFF, // bit 1 ColorMode::BRIGHTNESS, // bit 2 @@ -126,33 +127,20 @@ static constexpr ColorMode COLOR_MODES[COLOR_MODE_COUNT] = { ColorMode::RGB_COLD_WARM_WHITE, // bit 9 }; -/// Map ColorMode enum values to bit positions (0-9) -/// Bit positions follow the enum declaration order -static constexpr int mode_to_bit(ColorMode mode) { - // Linear search through COLOR_MODES array - // Compiler optimizes this to efficient code since array is constexpr - for (int i = 0; i < COLOR_MODE_COUNT; ++i) { - if (COLOR_MODES[i] == mode) - return i; - } - return 0; 
-} +// Type alias for ColorMode bitmask using generic EnumBitmask template +using ColorModeMask = EnumBitmask; -/// Map bit positions (0-9) to ColorMode enum values -/// Bit positions follow the enum declaration order -static constexpr ColorMode bit_to_mode(int bit) { - // Direct lookup in COLOR_MODES array - return (bit >= 0 && bit < COLOR_MODE_COUNT) ? COLOR_MODES[bit] : ColorMode::UNKNOWN; -} +// Number of ColorCapability enum values +constexpr int COLOR_CAPABILITY_COUNT = 6; /// Helper to compute capability bitmask at compile time -static constexpr color_mode_bitmask_t compute_capability_bitmask(ColorCapability capability) { - color_mode_bitmask_t mask = 0; +constexpr uint16_t compute_capability_bitmask(ColorCapability capability) { + uint16_t mask = 0; uint8_t cap_bit = static_cast(capability); // Check each ColorMode to see if it has this capability - for (int bit = 0; bit < COLOR_MODE_COUNT; ++bit) { - uint8_t mode_val = static_cast(bit_to_mode(bit)); + for (int bit = 0; bit < COLOR_MODE_BITMASK_SIZE; ++bit) { + uint8_t mode_val = static_cast(COLOR_MODE_LOOKUP[bit]); if ((mode_val & cap_bit) != 0) { mask |= (1 << bit); } @@ -160,12 +148,9 @@ static constexpr color_mode_bitmask_t compute_capability_bitmask(ColorCapability return mask; } -// Number of ColorCapability enum values -static constexpr int COLOR_CAPABILITY_COUNT = 6; - /// Compile-time lookup table mapping ColorCapability to bitmask /// This array is computed at compile time using constexpr -static constexpr color_mode_bitmask_t CAPABILITY_BITMASKS[] = { +constexpr uint16_t CAPABILITY_BITMASKS[] = { compute_capability_bitmask(ColorCapability::ON_OFF), // 1 << 0 compute_capability_bitmask(ColorCapability::BRIGHTNESS), // 1 << 1 compute_capability_bitmask(ColorCapability::WHITE), // 1 << 2 @@ -174,130 +159,51 @@ static constexpr color_mode_bitmask_t CAPABILITY_BITMASKS[] = { compute_capability_bitmask(ColorCapability::RGB), // 1 << 5 }; -/// Bitmask for storing a set of ColorMode values efficiently. -/// Replaces std::set to eliminate red-black tree overhead (~586 bytes). 
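For readers skimming the hunk above: `compute_capability_bitmask()` walks the mode lookup table at compile time and sets bit *i* of the result whenever the *i*-th mode carries the requested capability bit, and `CAPABILITY_BITMASKS[]` caches one such mask per capability. A minimal standalone sketch of the same idea follows — the `Cap`/`Mode` enums, values, and function name here are illustrative stand-ins, not the actual ESPHome definitions:

```cpp
#include <cstdint>

// Illustrative stand-ins (not the real ESPHome enums): each mode value is a
// bitwise OR of the capability bits it provides, mirroring ColorMode's layout.
enum class Cap : uint8_t { ON_OFF = 1, BRIGHTNESS = 2, RGB = 32 };
enum class Mode : uint8_t {
  ON_OFF = 1,          // provides ON_OFF
  BRIGHTNESS = 1 | 2,  // provides ON_OFF | BRIGHTNESS
  RGB = 1 | 2 | 32,    // provides ON_OFF | BRIGHTNESS | RGB
};

constexpr Mode MODE_LOOKUP[3] = {Mode::ON_OFF, Mode::BRIGHTNESS, Mode::RGB};

// Set result bit i whenever MODE_LOOKUP[i] carries the requested capability bit.
constexpr uint16_t capability_bitmask(Cap cap) {
  uint16_t mask = 0;
  for (int bit = 0; bit < 3; ++bit) {
    if ((static_cast<uint8_t>(MODE_LOOKUP[bit]) & static_cast<uint8_t>(cap)) != 0)
      mask |= static_cast<uint16_t>(1u << bit);
  }
  return mask;
}

// All three modes can turn on/off (bits 0-2); only the last one provides RGB (bit 2).
static_assert(capability_bitmask(Cap::ON_OFF) == 0b111, "every mode supports on/off");
static_assert(capability_bitmask(Cap::RGB) == 0b100, "only the RGB mode supports RGB");

int main() { return 0; }
```

The real table in this hunk does the same walk over `COLOR_MODE_LOOKUP`, just with ten modes and six capabilities.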
-class ColorModeMask { - public: - constexpr ColorModeMask() = default; - - /// Support initializer list syntax: {ColorMode::RGB, ColorMode::WHITE} - constexpr ColorModeMask(std::initializer_list modes) { - for (auto mode : modes) { - this->add(mode); - } - } - - constexpr void add(ColorMode mode) { this->mask_ |= (1 << mode_to_bit(mode)); } - - /// Add multiple modes at once using initializer list - constexpr void add(std::initializer_list modes) { - for (auto mode : modes) { - this->add(mode); - } - } - - constexpr bool contains(ColorMode mode) const { return (this->mask_ & (1 << mode_to_bit(mode))) != 0; } - - constexpr size_t size() const { - // Count set bits using Brian Kernighan's algorithm - // More efficient for sparse bitmasks (typical case: 2-4 modes out of 10) - uint16_t n = this->mask_; - size_t count = 0; - while (n) { - n &= n - 1; // Clear the least significant set bit - count++; - } - return count; - } - - constexpr bool empty() const { return this->mask_ == 0; } - - /// Iterator support for API encoding - class Iterator { - public: - using iterator_category = std::forward_iterator_tag; - using value_type = ColorMode; - using difference_type = std::ptrdiff_t; - using pointer = const ColorMode *; - using reference = ColorMode; - - constexpr Iterator(color_mode_bitmask_t mask, int bit) : mask_(mask), bit_(bit) { advance_to_next_set_bit_(); } - - constexpr ColorMode operator*() const { return bit_to_mode(bit_); } - - constexpr Iterator &operator++() { - ++bit_; - advance_to_next_set_bit_(); - return *this; - } - - constexpr bool operator==(const Iterator &other) const { return bit_ == other.bit_; } - - constexpr bool operator!=(const Iterator &other) const { return !(*this == other); } - - private: - constexpr void advance_to_next_set_bit_() { bit_ = ColorModeMask::find_next_set_bit(mask_, bit_); } - - color_mode_bitmask_t mask_; - int bit_; - }; - - constexpr Iterator begin() const { return Iterator(mask_, 0); } - constexpr Iterator end() const { return Iterator(mask_, MAX_BIT_INDEX); } - - /// Get the raw bitmask value for API encoding - constexpr color_mode_bitmask_t get_mask() const { return this->mask_; } - - /// Find the next set bit in a bitmask starting from a given position - /// Returns the bit position, or MAX_BIT_INDEX if no more bits are set - static constexpr int find_next_set_bit(color_mode_bitmask_t mask, int start_bit) { - int bit = start_bit; - while (bit < MAX_BIT_INDEX && !(mask & (1 << bit))) { - ++bit; - } - return bit; - } - - /// Find the first set bit in a bitmask and return the corresponding ColorMode - /// Used for optimizing compute_color_mode_() intersection logic - static constexpr ColorMode first_mode_from_mask(color_mode_bitmask_t mask) { - return bit_to_mode(find_next_set_bit(mask, 0)); - } - - /// Check if a ColorMode is present in a raw bitmask value - /// Useful for checking intersection results without creating a temporary ColorModeMask - static constexpr bool mask_contains(color_mode_bitmask_t mask, ColorMode mode) { - return (mask & (1 << mode_to_bit(mode))) != 0; - } - - /// Check if any mode in the bitmask has a specific capability - /// Used for checking if a light supports a capability (e.g., BRIGHTNESS, RGB) - bool has_capability(ColorCapability capability) const { - // Lookup the pre-computed bitmask for this capability and check intersection with our mask - // ColorCapability values: 1, 2, 4, 8, 16, 32 -> array indices: 0, 1, 2, 3, 4, 5 - // We need to convert the power-of-2 value to an index - uint8_t cap_val = 
static_cast(capability); +/// Check if any mode in the bitmask has a specific capability +/// Used for checking if a light supports a capability (e.g., BRIGHTNESS, RGB) +inline bool has_capability(const ColorModeMask &mask, ColorCapability capability) { + // Lookup the pre-computed bitmask for this capability and check intersection with our mask + // ColorCapability values: 1, 2, 4, 8, 16, 32 -> array indices: 0, 1, 2, 3, 4, 5 + // We need to convert the power-of-2 value to an index + uint8_t cap_val = static_cast(capability); #if defined(__GNUC__) || defined(__clang__) - // Use compiler intrinsic for efficient bit position lookup (O(1) vs O(log n)) - int index = __builtin_ctz(cap_val); + // Use compiler intrinsic for efficient bit position lookup (O(1) vs O(log n)) + int index = __builtin_ctz(cap_val); #else - // Fallback for compilers without __builtin_ctz - int index = 0; - while (cap_val > 1) { - cap_val >>= 1; - ++index; - } -#endif - return (this->mask_ & CAPABILITY_BITMASKS[index]) != 0; + // Fallback for compilers without __builtin_ctz + int index = 0; + while (cap_val > 1) { + cap_val >>= 1; + ++index; } - - private: - // Using uint16_t instead of uint32_t for more efficient iteration (fewer bits to scan). - // Currently only 10 ColorMode values exist, so 16 bits is sufficient. - // Can be changed to uint32_t if more than 16 color modes are needed in the future. - // Note: Due to struct padding, uint16_t and uint32_t result in same LightTraits size (12 bytes). - color_mode_bitmask_t mask_{0}; -}; +#endif + return (mask.get_mask() & CAPABILITY_BITMASKS[index]) != 0; +} } // namespace light } // namespace esphome + +// Template specializations for ColorMode must be in global namespace + +/// Map ColorMode enum values to bit positions (0-9) +/// Bit positions follow the enum declaration order +template<> +constexpr int esphome::EnumBitmask::enum_to_bit( + esphome::light::ColorMode mode) { + // Linear search through COLOR_MODE_LOOKUP array + // Compiler optimizes this to efficient code since array is constexpr + for (int i = 0; i < esphome::light::COLOR_MODE_BITMASK_SIZE; ++i) { + if (esphome::light::COLOR_MODE_LOOKUP[i] == mode) + return i; + } + return 0; +} + +/// Map bit positions (0-9) to ColorMode enum values +/// Bit positions follow the enum declaration order +template<> +inline esphome::light::ColorMode esphome::EnumBitmask::bit_to_enum(int bit) { + return (bit >= 0 && bit < esphome::light::COLOR_MODE_BITMASK_SIZE) ? esphome::light::COLOR_MODE_LOOKUP[bit] + : esphome::light::ColorMode::UNKNOWN; +} diff --git a/esphome/components/light/light_call.cpp b/esphome/components/light/light_call.cpp index af193e1f11..26d14d7bb4 100644 --- a/esphome/components/light/light_call.cpp +++ b/esphome/components/light/light_call.cpp @@ -437,7 +437,7 @@ ColorMode LightCall::compute_color_mode_() { // Use the preferred suitable mode. 
if (intersection != 0) { - ColorMode mode = ColorModeMask::first_mode_from_mask(intersection); + ColorMode mode = ColorModeMask::first_value_from_mask(intersection); ESP_LOGI(TAG, "'%s': color mode not specified; using %s", this->parent_->get_name().c_str(), LOG_STR_ARG(color_mode_to_human(mode))); return mode; diff --git a/esphome/components/light/light_traits.h b/esphome/components/light/light_traits.h index 4532edca83..9dec9fb577 100644 --- a/esphome/components/light/light_traits.h +++ b/esphome/components/light/light_traits.h @@ -28,7 +28,7 @@ class LightTraits { bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.contains(color_mode); } bool supports_color_capability(ColorCapability color_capability) const { - return this->supported_color_modes_.has_capability(color_capability); + return has_capability(this->supported_color_modes_, color_capability); } float get_min_mireds() const { return this->min_mireds_; } diff --git a/esphome/core/enum_bitmask.h b/esphome/core/enum_bitmask.h new file mode 100644 index 0000000000..4c29c7047e --- /dev/null +++ b/esphome/core/enum_bitmask.h @@ -0,0 +1,155 @@ +#pragma once + +#include +#include +#include +#include +#include + +namespace esphome { + +/// Generic bitmask for storing a set of enum values efficiently. +/// Replaces std::set to eliminate red-black tree overhead (~586 bytes per instantiation). +/// +/// Template parameters: +/// EnumType: The enum type to store (must be uint8_t-based) +/// MaxBits: Maximum number of bits needed (auto-selects uint8_t/uint16_t/uint32_t) +/// +/// Requirements: +/// - EnumType must be an enum with sequential values starting from 0 +/// - Specialization must provide enum_to_bit() and bit_to_enum() static methods +/// - MaxBits must be sufficient to hold all enum values +/// +/// Example usage: +/// using ClimateModeMask = EnumBitmask; +/// ClimateModeMask modes({CLIMATE_MODE_HEAT, CLIMATE_MODE_COOL}); +/// if (modes.contains(CLIMATE_MODE_HEAT)) { ... } +/// for (auto mode : modes) { ... 
} // Iterate over set bits +/// +/// Design notes: +/// - Uses compile-time type selection for optimal size (uint8_t/uint16_t/uint32_t) +/// - Iterator converts bit positions to actual enum values during traversal +/// - All operations are constexpr-compatible for compile-time initialization +/// - Drop-in replacement for std::set with simpler API +/// +template class EnumBitmask { + public: + // Automatic bitmask type selection based on MaxBits + // ≤8 bits: uint8_t, ≤16 bits: uint16_t, otherwise: uint32_t + using bitmask_t = + typename std::conditional<(MaxBits <= 8), uint8_t, + typename std::conditional<(MaxBits <= 16), uint16_t, uint32_t>::type>::type; + + constexpr EnumBitmask() = default; + + /// Construct from initializer list: {VALUE1, VALUE2, ...} + constexpr EnumBitmask(std::initializer_list values) { + for (auto value : values) { + this->add(value); + } + } + + /// Add a single enum value to the set + constexpr void add(EnumType value) { this->mask_ |= (static_cast(1) << enum_to_bit(value)); } + + /// Add multiple enum values from initializer list + constexpr void add(std::initializer_list values) { + for (auto value : values) { + this->add(value); + } + } + + /// Remove an enum value from the set + constexpr void remove(EnumType value) { this->mask_ &= ~(static_cast(1) << enum_to_bit(value)); } + + /// Clear all values from the set + constexpr void clear() { this->mask_ = 0; } + + /// Check if the set contains a specific enum value + constexpr bool contains(EnumType value) const { + return (this->mask_ & (static_cast(1) << enum_to_bit(value))) != 0; + } + + /// Count the number of enum values in the set + constexpr size_t size() const { + // Brian Kernighan's algorithm - efficient for sparse bitmasks + // Typical case: 2-4 modes out of 10 possible + bitmask_t n = this->mask_; + size_t count = 0; + while (n) { + n &= n - 1; // Clear the least significant set bit + count++; + } + return count; + } + + /// Check if the set is empty + constexpr bool empty() const { return this->mask_ == 0; } + + /// Iterator support for range-based for loops and API encoding + /// Iterates over set bits and converts bit positions to enum values + class Iterator { + public: + using iterator_category = std::forward_iterator_tag; + using value_type = EnumType; + using difference_type = std::ptrdiff_t; + using pointer = const EnumType *; + using reference = EnumType; + + constexpr Iterator(bitmask_t mask, int bit) : mask_(mask), bit_(bit) { advance_to_next_set_bit_(); } + + constexpr EnumType operator*() const { return bit_to_enum(bit_); } + + constexpr Iterator &operator++() { + ++bit_; + advance_to_next_set_bit_(); + return *this; + } + + constexpr bool operator==(const Iterator &other) const { return bit_ == other.bit_; } + + constexpr bool operator!=(const Iterator &other) const { return !(*this == other); } + + private: + constexpr void advance_to_next_set_bit_() { bit_ = find_next_set_bit(mask_, bit_); } + + bitmask_t mask_; + int bit_; + }; + + constexpr Iterator begin() const { return Iterator(mask_, 0); } + constexpr Iterator end() const { return Iterator(mask_, MaxBits); } + + /// Get the raw bitmask value for optimized operations + constexpr bitmask_t get_mask() const { return this->mask_; } + + /// Check if a specific enum value is present in a raw bitmask + /// Useful for checking intersection results without creating temporary objects + static constexpr bool mask_contains(bitmask_t mask, EnumType value) { + return (mask & (static_cast(1) << enum_to_bit(value))) != 0; + } + + /// Get the 
first enum value from a raw bitmask + /// Used for optimizing intersection logic (e.g., "pick first suitable mode") + static constexpr EnumType first_value_from_mask(bitmask_t mask) { return bit_to_enum(find_next_set_bit(mask, 0)); } + + /// Find the next set bit in a bitmask starting from a given position + /// Returns the bit position, or MaxBits if no more bits are set + static constexpr int find_next_set_bit(bitmask_t mask, int start_bit) { + int bit = start_bit; + while (bit < MaxBits && !(mask & (static_cast(1) << bit))) { + ++bit; + } + return bit; + } + + protected: + // Must be provided by template specialization + // These convert between enum values and bit positions (0, 1, 2, ...) + static constexpr int enum_to_bit(EnumType value); + static EnumType bit_to_enum(int bit); // Not constexpr due to static array limitation in C++20 + + bitmask_t mask_{0}; +}; + +} // namespace esphome From e2b3617df32cd4c3696eed1aac4ddfcc38aa42a6 Mon Sep 17 00:00:00 2001 From: Jeff Brown Date: Wed, 22 Oct 2025 01:08:40 -0700 Subject: [PATCH 284/336] [climate] Fix restore state for fan mode, preset, and swing mode (#11126) Co-authored-by: J. Nick Koston Co-authored-by: J. Nick Koston --- esphome/components/climate/climate.cpp | 60 ++++++++++++-------------- esphome/components/climate/climate.h | 1 + 2 files changed, 28 insertions(+), 33 deletions(-) diff --git a/esphome/components/climate/climate.cpp b/esphome/components/climate/climate.cpp index 87d03f78c5..19fe241729 100644 --- a/esphome/components/climate/climate.cpp +++ b/esphome/components/climate/climate.cpp @@ -524,13 +524,23 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) { if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) { call.set_target_humidity(this->target_humidity); } - if (traits.get_supports_fan_modes() || !traits.get_supported_custom_fan_modes().empty()) { + if (this->uses_custom_fan_mode) { + if (this->custom_fan_mode < traits.get_supported_custom_fan_modes().size()) { + call.fan_mode_.reset(); + call.custom_fan_mode_ = *std::next(traits.get_supported_custom_fan_modes().cbegin(), this->custom_fan_mode); + } + } else if (traits.supports_fan_mode(this->fan_mode)) { call.set_fan_mode(this->fan_mode); } - if (traits.get_supports_presets() || !traits.get_supported_custom_presets().empty()) { + if (this->uses_custom_preset) { + if (this->custom_preset < traits.get_supported_custom_presets().size()) { + call.preset_.reset(); + call.custom_preset_ = *std::next(traits.get_supported_custom_presets().cbegin(), this->custom_preset); + } + } else if (traits.supports_preset(this->preset)) { call.set_preset(this->preset); } - if (traits.get_supports_swing_modes()) { + if (traits.supports_swing_mode(this->swing_mode)) { call.set_swing_mode(this->swing_mode); } return call; @@ -549,41 +559,25 @@ void ClimateDeviceRestoreState::apply(Climate *climate) { if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) { climate->target_humidity = this->target_humidity; } - if (traits.get_supports_fan_modes() && !this->uses_custom_fan_mode) { + if (this->uses_custom_fan_mode) { + if (this->custom_fan_mode < traits.get_supported_custom_fan_modes().size()) { + climate->fan_mode.reset(); + climate->custom_fan_mode = *std::next(traits.get_supported_custom_fan_modes().cbegin(), this->custom_fan_mode); + } + } else if (traits.supports_fan_mode(this->fan_mode)) { climate->fan_mode = this->fan_mode; + climate->custom_fan_mode.reset(); } - if (!traits.get_supported_custom_fan_modes().empty() && 
this->uses_custom_fan_mode) { - // std::set has consistent order (lexicographic for strings) - const auto &modes = traits.get_supported_custom_fan_modes(); - if (custom_fan_mode < modes.size()) { - size_t i = 0; - for (const auto &mode : modes) { - if (i == this->custom_fan_mode) { - climate->custom_fan_mode = mode; - break; - } - i++; - } + if (this->uses_custom_preset) { + if (this->custom_preset < traits.get_supported_custom_presets().size()) { + climate->preset.reset(); + climate->custom_preset = *std::next(traits.get_supported_custom_presets().cbegin(), this->custom_preset); } - } - if (traits.get_supports_presets() && !this->uses_custom_preset) { + } else if (traits.supports_preset(this->preset)) { climate->preset = this->preset; + climate->custom_preset.reset(); } - if (!traits.get_supported_custom_presets().empty() && uses_custom_preset) { - // std::set has consistent order (lexicographic for strings) - const auto &presets = traits.get_supported_custom_presets(); - if (custom_preset < presets.size()) { - size_t i = 0; - for (const auto &preset : presets) { - if (i == this->custom_preset) { - climate->custom_preset = preset; - break; - } - i++; - } - } - } - if (traits.get_supports_swing_modes()) { + if (traits.supports_swing_mode(this->swing_mode)) { climate->swing_mode = this->swing_mode; } climate->publish_state(); diff --git a/esphome/components/climate/climate.h b/esphome/components/climate/climate.h index 495464c6a2..0c3e3ebe16 100644 --- a/esphome/components/climate/climate.h +++ b/esphome/components/climate/climate.h @@ -33,6 +33,7 @@ class Climate; class ClimateCall { public: explicit ClimateCall(Climate *parent) : parent_(parent) {} + friend struct ClimateDeviceRestoreState; /// Set the mode of the climate device. ClimateCall &set_mode(ClimateMode mode); From c6711fc354d200bd37558367720c20ebefb5d542 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 22:19:07 -1000 Subject: [PATCH 285/336] adjust --- esphome/components/light/color_mode.h | 6 ++++++ esphome/core/enum_bitmask.h | 3 +++ 2 files changed, 9 insertions(+) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 9c6a4d147b..03132f54bf 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -184,6 +184,12 @@ inline bool has_capability(const ColorModeMask &mask, ColorCapability capability } // namespace esphome // Template specializations for ColorMode must be in global namespace +// +// C++ requires template specializations to be declared in the same namespace as the +// original template. Since EnumBitmask is in the esphome namespace (not esphome::light), +// we must provide these specializations at global scope with fully-qualified names. +// +// These specializations define how ColorMode enum values map to/from bit positions. /// Map ColorMode enum values to bit positions (0-9) /// Bit positions follow the enum declaration order diff --git a/esphome/core/enum_bitmask.h b/esphome/core/enum_bitmask.h index 4c29c7047e..fdbd0c50cc 100644 --- a/esphome/core/enum_bitmask.h +++ b/esphome/core/enum_bitmask.h @@ -26,6 +26,9 @@ namespace esphome { /// if (modes.contains(CLIMATE_MODE_HEAT)) { ... } /// for (auto mode : modes) { ... 
} // Iterate over set bits /// +/// For complete usage examples with template specializations, see: +/// - esphome/components/light/color_mode.h (ColorMode example) +/// /// Design notes: /// - Uses compile-time type selection for optimal size (uint8_t/uint16_t/uint32_t) /// - Iterator converts bit positions to actual enum values during traversal From d37eb59fd733b3dd06b2e62409249e92bde5b7b8 Mon Sep 17 00:00:00 2001 From: Jeff Brown Date: Wed, 22 Oct 2025 01:22:33 -0700 Subject: [PATCH 286/336] [light] Eliminate dimming undershoot during addressable light transition (#11471) --- .../components/light/addressable_light.cpp | 61 ++++++++++--------- esphome/components/light/addressable_light.h | 1 - 2 files changed, 32 insertions(+), 30 deletions(-) diff --git a/esphome/components/light/addressable_light.cpp b/esphome/components/light/addressable_light.cpp index cd83015ecb..5cbdcb0e86 100644 --- a/esphome/components/light/addressable_light.cpp +++ b/esphome/components/light/addressable_light.cpp @@ -61,6 +61,10 @@ void AddressableLightTransformer::start() { this->target_color_ *= to_uint8_scale(end_values.get_brightness() * end_values.get_state()); } +inline constexpr uint8_t subtract_scaled_difference(uint8_t a, uint8_t b, int32_t scale) { + return uint8_t(int32_t(a) - (((int32_t(a) - int32_t(b)) * scale) / 256)); +} + optional AddressableLightTransformer::apply() { float smoothed_progress = LightTransformer::smoothed_progress(this->get_progress_()); @@ -74,38 +78,37 @@ optional AddressableLightTransformer::apply() { // all LEDs, we use the current state of each LED as the start. // We can't use a direct lerp smoothing here though - that would require creating a copy of the original - // state of each LED at the start of the transition. - // Instead, we "fake" the look of the LERP by using an exponential average over time and using - // dynamically-calculated alpha values to match the look. + // state of each LED at the start of the transition. Instead, we "fake" the look of lerp by calculating + // the delta between the current state and the target state, assuming that the delta represents the rest + // of the transition that was to be applied as of the previous transition step, and scaling the delta for + // what should be left after the current transition step. In this manner, the delta decays to zero as the + // transition progresses. + // + // Here's an example of how the algorithm progresses in discrete steps: + // + // At time = 0.00, 0% complete, 100% remaining, 100% will remain after this step, so the scale is 100% / 100% = 100%. + // At time = 0.10, 0% complete, 100% remaining, 90% will remain after this step, so the scale is 90% / 100% = 90%. + // At time = 0.20, 10% complete, 90% remaining, 80% will remain after this step, so the scale is 80% / 90% = 88.9%. + // At time = 0.50, 20% complete, 80% remaining, 50% will remain after this step, so the scale is 50% / 80% = 62.5%. + // At time = 0.90, 50% complete, 50% remaining, 10% will remain after this step, so the scale is 10% / 50% = 20%. + // At time = 0.91, 90% complete, 10% remaining, 9% will remain after this step, so the scale is 9% / 10% = 90%. + // At time = 1.00, 91% complete, 9% remaining, 0% will remain after this step, so the scale is 0% / 9% = 0%. + // + // Because the color values are quantized to 8 bit resolution after each step, the transition may appear + // non-linear when applying small deltas. - float denom = (1.0f - smoothed_progress); - float alpha = denom == 0.0f ? 
1.0f : (smoothed_progress - this->last_transition_progress_) / denom; - - // We need to use a low-resolution alpha here which makes the transition set in only after ~half of the length - // We solve this by accumulating the fractional part of the alpha over time. - float alpha255 = alpha * 255.0f; - float alpha255int = floorf(alpha255); - float alpha255remainder = alpha255 - alpha255int; - - this->accumulated_alpha_ += alpha255remainder; - float alpha_add = floorf(this->accumulated_alpha_); - this->accumulated_alpha_ -= alpha_add; - - alpha255 += alpha_add; - alpha255 = clamp(alpha255, 0.0f, 255.0f); - auto alpha8 = static_cast(alpha255); - - if (alpha8 != 0) { - uint8_t inv_alpha8 = 255 - alpha8; - Color add = this->target_color_ * alpha8; - - for (auto led : this->light_) - led.set(add + led.get() * inv_alpha8); + if (smoothed_progress > this->last_transition_progress_ && this->last_transition_progress_ < 1.f) { + int32_t scale = int32_t(256.f * std::max((1.f - smoothed_progress) / (1.f - this->last_transition_progress_), 0.f)); + for (auto led : this->light_) { + led.set_rgbw(subtract_scaled_difference(this->target_color_.red, led.get_red(), scale), + subtract_scaled_difference(this->target_color_.green, led.get_green(), scale), + subtract_scaled_difference(this->target_color_.blue, led.get_blue(), scale), + subtract_scaled_difference(this->target_color_.white, led.get_white(), scale)); + } + this->last_transition_progress_ = smoothed_progress; + this->light_.schedule_show(); } - this->last_transition_progress_ = smoothed_progress; - this->light_.schedule_show(); - return {}; } diff --git a/esphome/components/light/addressable_light.h b/esphome/components/light/addressable_light.h index c8ed4897fa..393cc679bc 100644 --- a/esphome/components/light/addressable_light.h +++ b/esphome/components/light/addressable_light.h @@ -113,7 +113,6 @@ class AddressableLightTransformer : public LightTransformer { protected: AddressableLight &light_; float last_transition_progress_{0.0f}; - float accumulated_alpha_{0.0f}; Color target_color_{}; }; From 1119b4e11e21636f02e123053ad043054fae2319 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 21 Oct 2025 22:23:37 -1000 Subject: [PATCH 287/336] [core] Add std::set compatibility aliases to EnumBitmask - Add insert() as alias for add() - Add erase() as alias for remove() - Add count() as alias for contains() - Makes EnumBitmask a true drop-in replacement for std::set - Update documentation to reflect compatibility --- enum_templates.md | 200 +++++++++++++++++++++++++++ esphome/core/enum_bitmask.h | 9 ++ extract_color_mode_mask_helper_pr.md | 98 +++++++++++++ 3 files changed, 307 insertions(+) create mode 100644 enum_templates.md create mode 100644 extract_color_mode_mask_helper_pr.md diff --git a/enum_templates.md b/enum_templates.md new file mode 100644 index 0000000000..175f8d0b89 --- /dev/null +++ b/enum_templates.md @@ -0,0 +1,200 @@ +# EnumBitmask Pattern Documentation + +## Overview + +`EnumBitmask` from `esphome/core/enum_bitmask.h` provides a memory-efficient replacement for `std::set` when storing sets of enum values. 
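The compactness claim above comes from the storage-type selection inside `EnumBitmask`: the bitmask member is the smallest unsigned integer that can hold `MaxBits` bits. A small self-contained sketch of that selection rule — the alias name `bitmask_storage_t` is purely for illustration:

```cpp
#include <cstdint>
#include <type_traits>

// Same selection rule as EnumBitmask's bitmask_t: <=8 bits -> uint8_t,
// <=16 bits -> uint16_t, anything larger (up to 32) -> uint32_t.
template<int MaxBits>
using bitmask_storage_t =
    typename std::conditional<(MaxBits <= 8), uint8_t,
        typename std::conditional<(MaxBits <= 16), uint16_t, uint32_t>::type>::type;

static_assert(sizeof(bitmask_storage_t<6>) == 1, "a 6-value enum fits in one byte");
static_assert(sizeof(bitmask_storage_t<10>) == 2, "10 color modes fit in two bytes");
static_assert(sizeof(bitmask_storage_t<20>) == 4, "larger enums fall back to uint32_t");

int main() { return 0; }
```

By contrast, the `std::set` it replaces pays for node-based storage plus the red-black tree code referenced above.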
+ +## When to Use + +Use `EnumBitmask` instead of `std::set` when: +- Storing sets of enum values (e.g., supported modes, capabilities) +- Enum has ≤32 distinct values +- Memory efficiency is important (saves ~586 bytes per `std::set` instance) + +## Benefits + +- **Memory Savings**: Eliminates red-black tree overhead (~586 bytes per instance) +- **Compact Storage**: 1-4 bytes depending on enum count (uint8_t/uint16_t/uint32_t) +- **Constexpr-Compatible**: Supports compile-time initialization +- **Efficient Iteration**: Only visits set bits, not all possible enum values +- **Range-Based Loops**: `for (auto value : mask)` works seamlessly + +## Requirements + +1. Enum must have sequential values (or use a lookup table for mapping) +2. Maximum 32 enum values (uint32_t bitmask limitation) +3. Must provide template specializations for `enum_to_bit()` and `bit_to_enum()` + +## Basic Usage Example + +```cpp +// Bad - red-black tree overhead (~586 bytes) +std::set supported_modes; +supported_modes.insert(ColorMode::RGB); +supported_modes.insert(ColorMode::WHITE); +if (supported_modes.count(ColorMode::RGB)) { ... } + +// Good - compact bitmask storage (2-4 bytes) +ColorModeMask supported_modes({ColorMode::RGB, ColorMode::WHITE}); +if (supported_modes.contains(ColorMode::RGB)) { ... } +for (auto mode : supported_modes) { ... } // Iterate over set values +``` + +## Implementation Pattern + +### 1. Define the Lookup Table + +If enum values aren't sequential from 0, create a lookup table: + +```cpp +// In your component header (e.g., esphome/components/light/color_mode.h) +constexpr ColorMode COLOR_MODE_LOOKUP[10] = { + ColorMode::UNKNOWN, // bit 0 + ColorMode::ON_OFF, // bit 1 + ColorMode::BRIGHTNESS, // bit 2 + ColorMode::WHITE, // bit 3 + ColorMode::COLOR_TEMPERATURE, // bit 4 + ColorMode::COLD_WARM_WHITE, // bit 5 + ColorMode::RGB, // bit 6 + ColorMode::RGB_WHITE, // bit 7 + ColorMode::RGB_COLOR_TEMPERATURE, // bit 8 + ColorMode::RGB_COLD_WARM_WHITE, // bit 9 +}; +``` + +### 2. Create Type Alias + +```cpp +constexpr int COLOR_MODE_BITMASK_SIZE = 10; +using ColorModeMask = EnumBitmask; +``` + +### 3. Provide Template Specializations + +**IMPORTANT**: Specializations must be in the **global namespace** (C++ requirement). Place them at the end of your header file, outside your component namespace. + +```cpp +// At end of header, outside namespace esphome::light +// Template specializations for ColorMode must be in global namespace +// +// C++ requires template specializations to be declared in the same namespace as the +// original template. Since EnumBitmask is in the esphome namespace (not esphome::light), +// we must provide these specializations at global scope with fully-qualified names. +// +// These specializations define how ColorMode enum values map to/from bit positions. + +/// Map ColorMode enum values to bit positions (0-9) +template<> +constexpr int esphome::EnumBitmask::enum_to_bit( + esphome::light::ColorMode mode) { + // Map enum value to bit position (0-9) + for (int i = 0; i < esphome::light::COLOR_MODE_BITMASK_SIZE; ++i) { + if (esphome::light::COLOR_MODE_LOOKUP[i] == mode) + return i; + } + return 0; // Unknown values map to bit 0 (typically reserved for UNKNOWN/NONE) +} + +/// Map bit positions (0-9) to ColorMode enum values +template<> +inline esphome::light::ColorMode esphome::EnumBitmask::bit_to_enum(int bit) { + return (bit >= 0 && bit < esphome::light::COLOR_MODE_BITMASK_SIZE) + ? 
esphome::light::COLOR_MODE_LOOKUP[bit] + : esphome::light::ColorMode::UNKNOWN; +} +``` + +### Error Handling in enum_to_bit() + +The implementation returns bit 0 for unknown enum values: +```cpp +return 0; // Unknown values map to bit 0 +``` + +This means an unknown ColorMode maps to the same bit as `ColorMode::UNKNOWN`. This is acceptable because: +- Compile-time failure occurs if using invalid enum values +- `ColorMode::UNKNOWN` at bit 0 is semantically correct +- Runtime misuse is prevented by type safety + +## API Compatibility with std::set + +EnumBitmask provides both modern `.contains()` / `.add()` / `.remove()` methods and std::set-compatible aliases for drop-in replacement: + +| Operation | std::set | EnumBitmask | Notes | +|-----------|----------|-------------|-------| +| Add value | `.insert(value)` | `.insert(value)` or `.add(value)` | Both work | +| Check membership | `.count(value)` | `.count(value)` or `.contains(value)` | Both work | +| Remove value | `.erase(value)` | `.erase(value)` or `.remove(value)` | Both work | +| Count elements | `.size()` | `.size()` | Same | +| Check empty | `.empty()` | `.empty()` | Same | +| Clear all | `.clear()` | `.clear()` | Same | +| Iterate | `for (auto v : set)` | `for (auto v : mask)` | Same | + +**Drop-in replacement**: You can use either the std::set-compatible methods (`.insert()`, `.count()`, `.erase()`) or the more explicit methods (`.add()`, `.contains()`, `.remove()`). + +## Complete Usage Example + +See `esphome/components/light/color_mode.h` for a complete real-world implementation showing: +- Lookup table definition +- Type aliases +- Template specializations +- Helper functions using the bitmask + +## Common Patterns + +### Compile-Time Initialization + +```cpp +// Constexpr-compatible for compile-time initialization +constexpr ColorModeMask DEFAULT_MODES({ColorMode::ON_OFF, ColorMode::BRIGHTNESS}); +``` + +### Adding Multiple Values + +```cpp +ColorModeMask modes; +modes.add({ColorMode::RGB, ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE}); +``` + +### Checking and Iterating + +```cpp +if (modes.contains(ColorMode::RGB)) { + // RGB mode is supported +} + +for (auto mode : modes) { + // Process each supported mode + ESP_LOGD(TAG, "Supported mode: %d", static_cast(mode)); +} +``` + +### Working with Raw Bitmask Values + +```cpp +// Get raw bitmask for bitwise operations +auto mask = modes.get_mask(); + +// Check if raw bitmask contains a value +if (ColorModeMask::mask_contains(mask, ColorMode::RGB)) { ... 
} + +// Get first value from raw bitmask +auto first = ColorModeMask::first_value_from_mask(mask); +``` + +## Detection of Opportunities + +Look for these patterns in existing code: +- `std::set` with small enum sets (≤32 values) +- Components storing "supported modes" or "capabilities" +- Red-black tree code (`rb_tree`, `_Rb_tree`) in compiler output +- Flash size increases when adding enum set storage + +## When NOT to Use + +- Enum has >32 distinct values (bitmask limitation) +- Need to store arbitrary runtime-determined integer values (not enum values) +- Enum values are sparse or non-sequential and lookup table would be impractical +- Code readability matters more than memory savings (niche single-use components) diff --git a/esphome/core/enum_bitmask.h b/esphome/core/enum_bitmask.h index fdbd0c50cc..d5d531763e 100644 --- a/esphome/core/enum_bitmask.h +++ b/esphome/core/enum_bitmask.h @@ -62,9 +62,15 @@ template class EnumBitmask { } } + /// std::set compatibility: insert() is an alias for add() + constexpr void insert(EnumType value) { this->add(value); } + /// Remove an enum value from the set constexpr void remove(EnumType value) { this->mask_ &= ~(static_cast(1) << enum_to_bit(value)); } + /// std::set compatibility: erase() is an alias for remove() + constexpr void erase(EnumType value) { this->remove(value); } + /// Clear all values from the set constexpr void clear() { this->mask_ = 0; } @@ -73,6 +79,9 @@ template class EnumBitmask { return (this->mask_ & (static_cast(1) << enum_to_bit(value))) != 0; } + /// std::set compatibility: count() returns 1 if present, 0 if not (same as std::set for unique elements) + constexpr size_t count(EnumType value) const { return this->contains(value) ? 1 : 0; } + /// Count the number of enum values in the set constexpr size_t size() const { // Brian Kernighan's algorithm - efficient for sparse bitmasks diff --git a/extract_color_mode_mask_helper_pr.md b/extract_color_mode_mask_helper_pr.md new file mode 100644 index 0000000000..6a4d98a5f8 --- /dev/null +++ b/extract_color_mode_mask_helper_pr.md @@ -0,0 +1,98 @@ +# What does this implement/fix? + +This PR extracts the `ColorModeMask` implementation from the light component into a generic `EnumBitmask` template helper in `esphome/core/enum_bitmask.h`. This refactoring enables code reuse across other components (e.g., climate, fan) that need efficient enum set storage without STL container overhead. + +## Key Benefits + +- **Code Reuse**: Generic template can be used by any component needing enum bitmask storage (climate, fan, cover, etc.) +- **Memory Efficiency**: Replaces `std::set` with compact bitmask storage (~586 bytes saved per instance) +- **Zero-cost Abstraction**: Maintains same performance characteristics with cleaner, more maintainable code +- **Flash Savings**: 16 bytes reduction on ESP8266 in initial testing + +## Technical Changes + +1. **New Generic Template** (`esphome/core/enum_bitmask.h`): + - `EnumBitmask` template class + - Auto-selects optimal storage type (uint8_t/uint16_t/uint32_t) based on MaxBits + - Provides iterator support, initializer list construction, and static utility methods + - Requires specialization of `enum_to_bit()` and `bit_to_enum()` for each enum type + +2. **std::set Compatibility**: + - Provides both modern API (`.contains()`, `.add()`, `.remove()`) and std::set-compatible aliases (`.count()`, `.insert()`, `.erase()`) + - True drop-in replacement - existing code using `.insert()` and `.count()` works unchanged + +3. 
**Light Component Refactoring** (`esphome/components/light/color_mode.h`): + - Replaced custom `ColorModeMask` class with `using ColorModeMask = EnumBitmask` + - Single shared `COLOR_MODE_LOOKUP` array eliminates code duplication + - Template specializations provide enum↔bit mapping + - Moved `has_capability()` to namespace-level function for cleaner API + +4. **Updated Call Sites**: + - `light_call.cpp`: Uses `ColorModeMask::first_value_from_mask()` and `ColorModeMask::mask_contains()` static methods + - `light_traits.h`: Uses namespace-level `has_capability()` function + - No changes required to other light component files (drop-in replacement) + +## Design Rationale + +The generic template follows the same pattern as the original `ColorModeMask` but makes it reusable: +- Constexpr-compatible for compile-time initialization +- Iterator support for range-based for loops and API encoding +- Static methods for working with raw bitmask values (for bitwise operation results) +- Protected specialization interface ensures type safety + +This establishes a pattern that can be applied to other components: +- Climate modes/presets (upcoming PR) +- Fan modes +- Cover operations +- Any component with small enum sets (≤32 values) + +## Types of changes + +- [x] Code quality improvements to existing code or addition of tests + +**Related issue or feature (if applicable):** + +- Part of ongoing memory optimization effort for embedded platforms + +**Pull request in [esphome-docs](https://github.com/esphome/esphome-docs) with documentation (if applicable):** + +- N/A (internal refactoring, no user-facing changes) + +## Test Environment + +- [x] ESP32 +- [x] ESP32 IDF +- [x] ESP8266 +- [ ] RP2040 +- [ ] BK72xx +- [ ] RTL87xx +- [ ] nRF52840 + +## Example entry for `config.yaml`: + +```yaml +# No config changes required - internal refactoring only +# All existing light configurations continue to work unchanged + +light: + - platform: rgb + id: test_rgb_light + name: "Test RGB Light" + red: red_output + green: green_output + blue: blue_output +``` + +## Checklist: + - [x] The code change is tested and works locally. + - [x] Tests have been added to verify that the new code works (under `tests/` folder). + +If user exposed functionality or configuration variables are added/changed: + - [ ] Documentation added/updated in [esphome-docs](https://github.com/esphome/esphome-docs). + +## Additional Notes + +- **Zero functional changes**: This is a pure refactoring with identical runtime behavior +- **Binary size impact**: Slight improvement on ESP8266 (16 bytes flash reduction) +- **Future work**: Will apply this pattern to climate component in follow-up PR +- **Test coverage**: All modified code covered by existing light component tests From f8f967b25c0304ef00cd26f313eb16bc5306034a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 21 Oct 2025 22:25:57 -1000 Subject: [PATCH 288/336] wi --- enum_templates.md | 200 --------------------------- esphome/core/enum_bitmask.h | 32 ++--- extract_color_mode_mask_helper_pr.md | 98 ------------- 3 files changed, 12 insertions(+), 318 deletions(-) delete mode 100644 enum_templates.md delete mode 100644 extract_color_mode_mask_helper_pr.md diff --git a/enum_templates.md b/enum_templates.md deleted file mode 100644 index 175f8d0b89..0000000000 --- a/enum_templates.md +++ /dev/null @@ -1,200 +0,0 @@ -# EnumBitmask Pattern Documentation - -## Overview - -`EnumBitmask` from `esphome/core/enum_bitmask.h` provides a memory-efficient replacement for `std::set` when storing sets of enum values. - -## When to Use - -Use `EnumBitmask` instead of `std::set` when: -- Storing sets of enum values (e.g., supported modes, capabilities) -- Enum has ≤32 distinct values -- Memory efficiency is important (saves ~586 bytes per `std::set` instance) - -## Benefits - -- **Memory Savings**: Eliminates red-black tree overhead (~586 bytes per instance) -- **Compact Storage**: 1-4 bytes depending on enum count (uint8_t/uint16_t/uint32_t) -- **Constexpr-Compatible**: Supports compile-time initialization -- **Efficient Iteration**: Only visits set bits, not all possible enum values -- **Range-Based Loops**: `for (auto value : mask)` works seamlessly - -## Requirements - -1. Enum must have sequential values (or use a lookup table for mapping) -2. Maximum 32 enum values (uint32_t bitmask limitation) -3. Must provide template specializations for `enum_to_bit()` and `bit_to_enum()` - -## Basic Usage Example - -```cpp -// Bad - red-black tree overhead (~586 bytes) -std::set supported_modes; -supported_modes.insert(ColorMode::RGB); -supported_modes.insert(ColorMode::WHITE); -if (supported_modes.count(ColorMode::RGB)) { ... } - -// Good - compact bitmask storage (2-4 bytes) -ColorModeMask supported_modes({ColorMode::RGB, ColorMode::WHITE}); -if (supported_modes.contains(ColorMode::RGB)) { ... } -for (auto mode : supported_modes) { ... } // Iterate over set values -``` - -## Implementation Pattern - -### 1. Define the Lookup Table - -If enum values aren't sequential from 0, create a lookup table: - -```cpp -// In your component header (e.g., esphome/components/light/color_mode.h) -constexpr ColorMode COLOR_MODE_LOOKUP[10] = { - ColorMode::UNKNOWN, // bit 0 - ColorMode::ON_OFF, // bit 1 - ColorMode::BRIGHTNESS, // bit 2 - ColorMode::WHITE, // bit 3 - ColorMode::COLOR_TEMPERATURE, // bit 4 - ColorMode::COLD_WARM_WHITE, // bit 5 - ColorMode::RGB, // bit 6 - ColorMode::RGB_WHITE, // bit 7 - ColorMode::RGB_COLOR_TEMPERATURE, // bit 8 - ColorMode::RGB_COLD_WARM_WHITE, // bit 9 -}; -``` - -### 2. Create Type Alias - -```cpp -constexpr int COLOR_MODE_BITMASK_SIZE = 10; -using ColorModeMask = EnumBitmask; -``` - -### 3. Provide Template Specializations - -**IMPORTANT**: Specializations must be in the **global namespace** (C++ requirement). Place them at the end of your header file, outside your component namespace. - -```cpp -// At end of header, outside namespace esphome::light -// Template specializations for ColorMode must be in global namespace -// -// C++ requires template specializations to be declared in the same namespace as the -// original template. Since EnumBitmask is in the esphome namespace (not esphome::light), -// we must provide these specializations at global scope with fully-qualified names. -// -// These specializations define how ColorMode enum values map to/from bit positions. 
- -/// Map ColorMode enum values to bit positions (0-9) -template<> -constexpr int esphome::EnumBitmask::enum_to_bit( - esphome::light::ColorMode mode) { - // Map enum value to bit position (0-9) - for (int i = 0; i < esphome::light::COLOR_MODE_BITMASK_SIZE; ++i) { - if (esphome::light::COLOR_MODE_LOOKUP[i] == mode) - return i; - } - return 0; // Unknown values map to bit 0 (typically reserved for UNKNOWN/NONE) -} - -/// Map bit positions (0-9) to ColorMode enum values -template<> -inline esphome::light::ColorMode esphome::EnumBitmask::bit_to_enum(int bit) { - return (bit >= 0 && bit < esphome::light::COLOR_MODE_BITMASK_SIZE) - ? esphome::light::COLOR_MODE_LOOKUP[bit] - : esphome::light::ColorMode::UNKNOWN; -} -``` - -### Error Handling in enum_to_bit() - -The implementation returns bit 0 for unknown enum values: -```cpp -return 0; // Unknown values map to bit 0 -``` - -This means an unknown ColorMode maps to the same bit as `ColorMode::UNKNOWN`. This is acceptable because: -- Compile-time failure occurs if using invalid enum values -- `ColorMode::UNKNOWN` at bit 0 is semantically correct -- Runtime misuse is prevented by type safety - -## API Compatibility with std::set - -EnumBitmask provides both modern `.contains()` / `.add()` / `.remove()` methods and std::set-compatible aliases for drop-in replacement: - -| Operation | std::set | EnumBitmask | Notes | -|-----------|----------|-------------|-------| -| Add value | `.insert(value)` | `.insert(value)` or `.add(value)` | Both work | -| Check membership | `.count(value)` | `.count(value)` or `.contains(value)` | Both work | -| Remove value | `.erase(value)` | `.erase(value)` or `.remove(value)` | Both work | -| Count elements | `.size()` | `.size()` | Same | -| Check empty | `.empty()` | `.empty()` | Same | -| Clear all | `.clear()` | `.clear()` | Same | -| Iterate | `for (auto v : set)` | `for (auto v : mask)` | Same | - -**Drop-in replacement**: You can use either the std::set-compatible methods (`.insert()`, `.count()`, `.erase()`) or the more explicit methods (`.add()`, `.contains()`, `.remove()`). - -## Complete Usage Example - -See `esphome/components/light/color_mode.h` for a complete real-world implementation showing: -- Lookup table definition -- Type aliases -- Template specializations -- Helper functions using the bitmask - -## Common Patterns - -### Compile-Time Initialization - -```cpp -// Constexpr-compatible for compile-time initialization -constexpr ColorModeMask DEFAULT_MODES({ColorMode::ON_OFF, ColorMode::BRIGHTNESS}); -``` - -### Adding Multiple Values - -```cpp -ColorModeMask modes; -modes.add({ColorMode::RGB, ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE}); -``` - -### Checking and Iterating - -```cpp -if (modes.contains(ColorMode::RGB)) { - // RGB mode is supported -} - -for (auto mode : modes) { - // Process each supported mode - ESP_LOGD(TAG, "Supported mode: %d", static_cast(mode)); -} -``` - -### Working with Raw Bitmask Values - -```cpp -// Get raw bitmask for bitwise operations -auto mask = modes.get_mask(); - -// Check if raw bitmask contains a value -if (ColorModeMask::mask_contains(mask, ColorMode::RGB)) { ... 
} - -// Get first value from raw bitmask -auto first = ColorModeMask::first_value_from_mask(mask); -``` - -## Detection of Opportunities - -Look for these patterns in existing code: -- `std::set` with small enum sets (≤32 values) -- Components storing "supported modes" or "capabilities" -- Red-black tree code (`rb_tree`, `_Rb_tree`) in compiler output -- Flash size increases when adding enum set storage - -## When NOT to Use - -- Enum has >32 distinct values (bitmask limitation) -- Need to store arbitrary runtime-determined integer values (not enum values) -- Enum values are sparse or non-sequential and lookup table would be impractical -- Code readability matters more than memory savings (niche single-use components) diff --git a/esphome/core/enum_bitmask.h b/esphome/core/enum_bitmask.h index d5d531763e..b3112c610b 100644 --- a/esphome/core/enum_bitmask.h +++ b/esphome/core/enum_bitmask.h @@ -23,7 +23,7 @@ namespace esphome { /// Example usage: /// using ClimateModeMask = EnumBitmask; /// ClimateModeMask modes({CLIMATE_MODE_HEAT, CLIMATE_MODE_COOL}); -/// if (modes.contains(CLIMATE_MODE_HEAT)) { ... } +/// if (modes.count(CLIMATE_MODE_HEAT)) { ... } /// for (auto mode : modes) { ... } // Iterate over set bits /// /// For complete usage examples with template specializations, see: @@ -48,40 +48,32 @@ template class EnumBitmask { /// Construct from initializer list: {VALUE1, VALUE2, ...} constexpr EnumBitmask(std::initializer_list values) { for (auto value : values) { - this->add(value); + this->insert(value); } } - /// Add a single enum value to the set - constexpr void add(EnumType value) { this->mask_ |= (static_cast(1) << enum_to_bit(value)); } + /// Add a single enum value to the set (std::set compatibility) + constexpr void insert(EnumType value) { this->mask_ |= (static_cast(1) << enum_to_bit(value)); } /// Add multiple enum values from initializer list - constexpr void add(std::initializer_list values) { + constexpr void insert(std::initializer_list values) { for (auto value : values) { - this->add(value); + this->insert(value); } } - /// std::set compatibility: insert() is an alias for add() - constexpr void insert(EnumType value) { this->add(value); } - - /// Remove an enum value from the set - constexpr void remove(EnumType value) { this->mask_ &= ~(static_cast(1) << enum_to_bit(value)); } - - /// std::set compatibility: erase() is an alias for remove() - constexpr void erase(EnumType value) { this->remove(value); } + /// Remove an enum value from the set (std::set compatibility) + constexpr void erase(EnumType value) { this->mask_ &= ~(static_cast(1) << enum_to_bit(value)); } /// Clear all values from the set constexpr void clear() { this->mask_ = 0; } - /// Check if the set contains a specific enum value - constexpr bool contains(EnumType value) const { - return (this->mask_ & (static_cast(1) << enum_to_bit(value))) != 0; + /// Check if the set contains a specific enum value (std::set compatibility) + /// Returns 1 if present, 0 if not (same as std::set for unique elements) + constexpr size_t count(EnumType value) const { + return (this->mask_ & (static_cast(1) << enum_to_bit(value))) != 0 ? 1 : 0; } - /// std::set compatibility: count() returns 1 if present, 0 if not (same as std::set for unique elements) - constexpr size_t count(EnumType value) const { return this->contains(value) ? 
1 : 0; } - /// Count the number of enum values in the set constexpr size_t size() const { // Brian Kernighan's algorithm - efficient for sparse bitmasks diff --git a/extract_color_mode_mask_helper_pr.md b/extract_color_mode_mask_helper_pr.md deleted file mode 100644 index 6a4d98a5f8..0000000000 --- a/extract_color_mode_mask_helper_pr.md +++ /dev/null @@ -1,98 +0,0 @@ -# What does this implement/fix? - -This PR extracts the `ColorModeMask` implementation from the light component into a generic `EnumBitmask` template helper in `esphome/core/enum_bitmask.h`. This refactoring enables code reuse across other components (e.g., climate, fan) that need efficient enum set storage without STL container overhead. - -## Key Benefits - -- **Code Reuse**: Generic template can be used by any component needing enum bitmask storage (climate, fan, cover, etc.) -- **Memory Efficiency**: Replaces `std::set` with compact bitmask storage (~586 bytes saved per instance) -- **Zero-cost Abstraction**: Maintains same performance characteristics with cleaner, more maintainable code -- **Flash Savings**: 16 bytes reduction on ESP8266 in initial testing - -## Technical Changes - -1. **New Generic Template** (`esphome/core/enum_bitmask.h`): - - `EnumBitmask` template class - - Auto-selects optimal storage type (uint8_t/uint16_t/uint32_t) based on MaxBits - - Provides iterator support, initializer list construction, and static utility methods - - Requires specialization of `enum_to_bit()` and `bit_to_enum()` for each enum type - -2. **std::set Compatibility**: - - Provides both modern API (`.contains()`, `.add()`, `.remove()`) and std::set-compatible aliases (`.count()`, `.insert()`, `.erase()`) - - True drop-in replacement - existing code using `.insert()` and `.count()` works unchanged - -3. **Light Component Refactoring** (`esphome/components/light/color_mode.h`): - - Replaced custom `ColorModeMask` class with `using ColorModeMask = EnumBitmask` - - Single shared `COLOR_MODE_LOOKUP` array eliminates code duplication - - Template specializations provide enum↔bit mapping - - Moved `has_capability()` to namespace-level function for cleaner API - -4. 
**Updated Call Sites**: - - `light_call.cpp`: Uses `ColorModeMask::first_value_from_mask()` and `ColorModeMask::mask_contains()` static methods - - `light_traits.h`: Uses namespace-level `has_capability()` function - - No changes required to other light component files (drop-in replacement) - -## Design Rationale - -The generic template follows the same pattern as the original `ColorModeMask` but makes it reusable: -- Constexpr-compatible for compile-time initialization -- Iterator support for range-based for loops and API encoding -- Static methods for working with raw bitmask values (for bitwise operation results) -- Protected specialization interface ensures type safety - -This establishes a pattern that can be applied to other components: -- Climate modes/presets (upcoming PR) -- Fan modes -- Cover operations -- Any component with small enum sets (≤32 values) - -## Types of changes - -- [x] Code quality improvements to existing code or addition of tests - -**Related issue or feature (if applicable):** - -- Part of ongoing memory optimization effort for embedded platforms - -**Pull request in [esphome-docs](https://github.com/esphome/esphome-docs) with documentation (if applicable):** - -- N/A (internal refactoring, no user-facing changes) - -## Test Environment - -- [x] ESP32 -- [x] ESP32 IDF -- [x] ESP8266 -- [ ] RP2040 -- [ ] BK72xx -- [ ] RTL87xx -- [ ] nRF52840 - -## Example entry for `config.yaml`: - -```yaml -# No config changes required - internal refactoring only -# All existing light configurations continue to work unchanged - -light: - - platform: rgb - id: test_rgb_light - name: "Test RGB Light" - red: red_output - green: green_output - blue: blue_output -``` - -## Checklist: - - [x] The code change is tested and works locally. - - [x] Tests have been added to verify that the new code works (under `tests/` folder). - -If user exposed functionality or configuration variables are added/changed: - - [ ] Documentation added/updated in [esphome-docs](https://github.com/esphome/esphome-docs). - -## Additional Notes - -- **Zero functional changes**: This is a pure refactoring with identical runtime behavior -- **Binary size impact**: Slight improvement on ESP8266 (16 bytes flash reduction) -- **Future work**: Will apply this pattern to climate component in follow-up PR -- **Test coverage**: All modified code covered by existing light component tests From 9d1ceba18f9246162a8e3b8de5219877e46d3da0 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 21 Oct 2025 22:28:59 -1000 Subject: [PATCH 289/336] [core] Use std::set API for EnumBitmask - Replace .contains()/.add()/.remove() with .count()/.insert()/.erase() - Makes EnumBitmask a true drop-in replacement for std::set - Update all usages in light component --- esphome/components/light/light_traits.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/light/light_traits.h b/esphome/components/light/light_traits.h index 9dec9fb577..294b0cad1d 100644 --- a/esphome/components/light/light_traits.h +++ b/esphome/components/light/light_traits.h @@ -26,7 +26,7 @@ class LightTraits { this->supported_color_modes_ = ColorModeMask(modes); } - bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.contains(color_mode); } + bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.count(color_mode) > 0; } bool supports_color_capability(ColorCapability color_capability) const { return has_capability(this->supported_color_modes_, color_capability); } From 3fda73bcf251e334fd85a881c4c067fad6ffffa4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 00:05:06 -1000 Subject: [PATCH 290/336] bot review --- esphome/components/light/color_mode.h | 20 ++++++++++++-------- esphome/core/enum_bitmask.h | 2 +- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 03132f54bf..77c5a13a6f 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -159,16 +159,13 @@ constexpr uint16_t CAPABILITY_BITMASKS[] = { compute_capability_bitmask(ColorCapability::RGB), // 1 << 5 }; -/// Check if any mode in the bitmask has a specific capability -/// Used for checking if a light supports a capability (e.g., BRIGHTNESS, RGB) -inline bool has_capability(const ColorModeMask &mask, ColorCapability capability) { - // Lookup the pre-computed bitmask for this capability and check intersection with our mask - // ColorCapability values: 1, 2, 4, 8, 16, 32 -> array indices: 0, 1, 2, 3, 4, 5 - // We need to convert the power-of-2 value to an index +/// Convert a power-of-2 ColorCapability value to an array index +/// ColorCapability values: 1, 2, 4, 8, 16, 32 -> array indices: 0, 1, 2, 3, 4, 5 +inline int capability_to_index(ColorCapability capability) { uint8_t cap_val = static_cast(capability); #if defined(__GNUC__) || defined(__clang__) // Use compiler intrinsic for efficient bit position lookup (O(1) vs O(log n)) - int index = __builtin_ctz(cap_val); + return __builtin_ctz(cap_val); #else // Fallback for compilers without __builtin_ctz int index = 0; @@ -176,8 +173,15 @@ inline bool has_capability(const ColorModeMask &mask, ColorCapability capability cap_val >>= 1; ++index; } + return index; #endif - return (mask.get_mask() & CAPABILITY_BITMASKS[index]) != 0; +} + +/// Check if any mode in the bitmask has a specific capability +/// Used for checking if a light supports a capability (e.g., BRIGHTNESS, RGB) +inline bool has_capability(const ColorModeMask &mask, ColorCapability capability) { + // Lookup the pre-computed bitmask for this capability and check intersection with our mask + return (mask.get_mask() & CAPABILITY_BITMASKS[capability_to_index(capability)]) != 0; } } // namespace light diff --git a/esphome/core/enum_bitmask.h b/esphome/core/enum_bitmask.h index b3112c610b..f9cda7ca2d 100644 --- a/esphome/core/enum_bitmask.h +++ b/esphome/core/enum_bitmask.h 
@@ -151,7 +151,7 @@ template class EnumBitmask { // Must be provided by template specialization // These convert between enum values and bit positions (0, 1, 2, ...) static constexpr int enum_to_bit(EnumType value); - static EnumType bit_to_enum(int bit); // Not constexpr due to static array limitation in C++20 + static EnumType bit_to_enum(int bit); // Not constexpr: array indexing with runtime bounds checking bitmask_t mask_{0}; }; From 6edbb945295188f94950288a90a8dad45171464c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 00:06:14 -1000 Subject: [PATCH 291/336] [ci] Fix test detection for components with only variant tests (#11474) --- script/determine-jobs.py | 4 +- tests/script/test_determine_jobs.py | 144 ++++++++++++++++++++++++++++ 2 files changed, 146 insertions(+), 2 deletions(-) diff --git a/script/determine-jobs.py b/script/determine-jobs.py index 7cdec959c7..ac384d74f1 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -336,7 +336,7 @@ def _component_has_tests(component: str) -> bool: Returns: True if the component has test YAML files """ - return bool(get_component_test_files(component)) + return bool(get_component_test_files(component, all_variants=True)) def _select_platform_by_preference( @@ -496,7 +496,7 @@ def detect_memory_impact_config( for component in sorted(changed_component_set): # Look for test files on preferred platforms - test_files = get_component_test_files(component) + test_files = get_component_test_files(component, all_variants=True) if not test_files: continue diff --git a/tests/script/test_determine_jobs.py b/tests/script/test_determine_jobs.py index 6095e86ea7..c9ccf53252 100644 --- a/tests/script/test_determine_jobs.py +++ b/tests/script/test_determine_jobs.py @@ -574,6 +574,105 @@ def test_main_filters_components_without_tests( assert output["memory_impact"]["should_run"] == "false" +def test_main_detects_components_with_variant_tests( + mock_should_run_integration_tests: Mock, + mock_should_run_clang_tidy: Mock, + mock_should_run_clang_format: Mock, + mock_should_run_python_linters: Mock, + mock_changed_files: Mock, + capsys: pytest.CaptureFixture[str], + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test that components with only variant test files (test-*.yaml) are detected. + + This test verifies the fix for components like improv_serial, ethernet, mdns, + improv_base, and safe_mode which only have variant test files (test-*.yaml) + instead of base test files (test.*.yaml). 
+ """ + # Ensure we're not in GITHUB_ACTIONS mode for this test + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + + mock_should_run_integration_tests.return_value = False + mock_should_run_clang_tidy.return_value = False + mock_should_run_clang_format.return_value = False + mock_should_run_python_linters.return_value = False + + # Mock changed_files to return component files + mock_changed_files.return_value = [ + "esphome/components/improv_serial/improv_serial.cpp", + "esphome/components/ethernet/ethernet.cpp", + "esphome/components/no_tests/component.cpp", + ] + + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # improv_serial has only variant tests (like the real component) + improv_serial_dir = tests_dir / "improv_serial" + improv_serial_dir.mkdir(parents=True) + (improv_serial_dir / "test-uart0.esp32-idf.yaml").write_text("test: config") + (improv_serial_dir / "test-uart0.esp8266-ard.yaml").write_text("test: config") + (improv_serial_dir / "test-usb_cdc.esp32-s2-idf.yaml").write_text("test: config") + + # ethernet also has only variant tests + ethernet_dir = tests_dir / "ethernet" + ethernet_dir.mkdir(parents=True) + (ethernet_dir / "test-manual_ip.esp32-idf.yaml").write_text("test: config") + (ethernet_dir / "test-dhcp.esp32-idf.yaml").write_text("test: config") + + # no_tests component has no test files at all + no_tests_dir = tests_dir / "no_tests" + no_tests_dir.mkdir(parents=True) + + # Mock root_path to use tmp_path (need to patch both determine_jobs and helpers) + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch("sys.argv", ["determine-jobs.py"]), + patch.object( + determine_jobs, + "get_changed_components", + return_value=["improv_serial", "ethernet", "no_tests"], + ), + patch.object( + determine_jobs, + "filter_component_and_test_files", + side_effect=lambda f: f.startswith("esphome/components/"), + ), + patch.object( + determine_jobs, + "get_components_with_dependencies", + side_effect=lambda files, deps: ( + ["improv_serial", "ethernet"] + if not deps + else ["improv_serial", "ethernet", "no_tests"] + ), + ), + patch.object(determine_jobs, "changed_files", return_value=[]), + ): + # Clear the cache since we're mocking root_path + determine_jobs._component_has_tests.cache_clear() + determine_jobs.main() + + # Check output + captured = capsys.readouterr() + output = json.loads(captured.out) + + # changed_components should have all components + assert set(output["changed_components"]) == { + "improv_serial", + "ethernet", + "no_tests", + } + # changed_components_with_tests should include components with variant tests + assert set(output["changed_components_with_tests"]) == {"improv_serial", "ethernet"} + # component_test_count should be 2 (improv_serial and ethernet) + assert output["component_test_count"] == 2 + # no_tests should be excluded since it has no test files + assert "no_tests" not in output["changed_components_with_tests"] + + # Tests for detect_memory_impact_config function @@ -785,6 +884,51 @@ def test_detect_memory_impact_config_skips_base_bus_components(tmp_path: Path) - assert "i2c" not in result["components"] +def test_detect_memory_impact_config_with_variant_tests(tmp_path: Path) -> None: + """Test memory impact detection for components with only variant test files. + + This verifies that memory impact analysis works correctly for components like + improv_serial, ethernet, mdns, etc. 
which only have variant test files + (test-*.yaml) instead of base test files (test.*.yaml). + """ + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # improv_serial with only variant tests + improv_serial_dir = tests_dir / "improv_serial" + improv_serial_dir.mkdir(parents=True) + (improv_serial_dir / "test-uart0.esp32-idf.yaml").write_text("test: improv") + (improv_serial_dir / "test-uart0.esp8266-ard.yaml").write_text("test: improv") + (improv_serial_dir / "test-usb_cdc.esp32-s2-idf.yaml").write_text("test: improv") + + # ethernet with only variant tests + ethernet_dir = tests_dir / "ethernet" + ethernet_dir.mkdir(parents=True) + (ethernet_dir / "test-manual_ip.esp32-idf.yaml").write_text("test: ethernet") + (ethernet_dir / "test-dhcp.esp32-c3-idf.yaml").write_text("test: ethernet") + + # Mock changed_files to return both components + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/improv_serial/improv_serial.cpp", + "esphome/components/ethernet/ethernet.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + # Should detect both components even though they only have variant tests + assert result["should_run"] == "true" + assert set(result["components"]) == {"improv_serial", "ethernet"} + # Both components support esp32-idf + assert result["platform"] == "esp32-idf" + assert result["use_merged_config"] == "true" + + # Tests for clang-tidy split mode logic From f592f79bcece7edb4810f09ebdb6e3f25267b398 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 07:30:27 -1000 Subject: [PATCH 292/336] [ci] Fix component splitter for components with only variant tests (#11476) --- script/split_components_for_ci.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/script/split_components_for_ci.py b/script/split_components_for_ci.py index 6ba2598eda..c58dfd218f 100755 --- a/script/split_components_for_ci.py +++ b/script/split_components_for_ci.py @@ -49,9 +49,9 @@ def has_test_files(component_name: str, tests_dir: Path) -> bool: tests_dir: Path to tests/components directory (unused, kept for compatibility) Returns: - True if the component has test.*.yaml files + True if the component has test.*.yaml or test-*.yaml files """ - return bool(get_component_test_files(component_name)) + return bool(get_component_test_files(component_name, all_variants=True)) def create_intelligent_batches( From 77141d3e83e1bb11107255423556373ba588d3ca Mon Sep 17 00:00:00 2001 From: Jonathan Swoboda <154711427+swoboda1337@users.noreply.github.com> Date: Wed, 22 Oct 2025 14:28:18 -0400 Subject: [PATCH 293/336] [esp32] Set the location of the IDF component manager cache (#11467) --- esphome/components/esp32/__init__.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/esphome/components/esp32/__init__.py b/esphome/components/esp32/__init__.py index cb6354cc74..48d11f46fa 100644 --- a/esphome/components/esp32/__init__.py +++ b/esphome/components/esp32/__init__.py @@ -877,6 +877,11 @@ async def to_code(config): for clean_var in ("IDF_PATH", "IDF_TOOLS_PATH"): os.environ.pop(clean_var, None) + # Set the location of the IDF component manager cache + os.environ["IDF_COMPONENT_CACHE_PATH"] = str( + CORE.relative_internal_path(".espressif") + ) + add_extra_script( "post", 
"post_build.py", From 92a812e154157ff83168bd9af4fd214831571d96 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 08:30:17 -1000 Subject: [PATCH 294/336] optimize --- esphome/core/enum_bitmask.h | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/esphome/core/enum_bitmask.h b/esphome/core/enum_bitmask.h index f9cda7ca2d..e1840a3ac0 100644 --- a/esphome/core/enum_bitmask.h +++ b/esphome/core/enum_bitmask.h @@ -92,6 +92,7 @@ template class EnumBitmask { /// Iterator support for range-based for loops and API encoding /// Iterates over set bits and converts bit positions to enum values + /// Optimization: removes bits from mask as we iterate class Iterator { public: using iterator_category = std::forward_iterator_tag; @@ -100,29 +101,29 @@ template class EnumBitmask { using pointer = const EnumType *; using reference = EnumType; - constexpr Iterator(bitmask_t mask, int bit) : mask_(mask), bit_(bit) { advance_to_next_set_bit_(); } + constexpr explicit Iterator(bitmask_t mask) : mask_(mask) {} - constexpr EnumType operator*() const { return bit_to_enum(bit_); } + constexpr EnumType operator*() const { + // Return enum for the first set bit + return bit_to_enum(find_next_set_bit(mask_, 0)); + } constexpr Iterator &operator++() { - ++bit_; - advance_to_next_set_bit_(); + // Clear the lowest set bit (Brian Kernighan's algorithm) + mask_ &= mask_ - 1; return *this; } - constexpr bool operator==(const Iterator &other) const { return bit_ == other.bit_; } + constexpr bool operator==(const Iterator &other) const { return mask_ == other.mask_; } constexpr bool operator!=(const Iterator &other) const { return !(*this == other); } private: - constexpr void advance_to_next_set_bit_() { bit_ = find_next_set_bit(mask_, bit_); } - bitmask_t mask_; - int bit_; }; - constexpr Iterator begin() const { return Iterator(mask_, 0); } - constexpr Iterator end() const { return Iterator(mask_, MaxBits); } + constexpr Iterator begin() const { return Iterator(mask_); } + constexpr Iterator end() const { return Iterator(0); } /// Get the raw bitmask value for optimized operations constexpr bitmask_t get_mask() const { return this->mask_; } From c70a3cf405a89a5a1b7f0300342c37763d526af6 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 08:44:08 -1000 Subject: [PATCH 295/336] feedback --- esphome/components/light/color_mode.h | 14 ++-- .../{enum_bitmask.h => finite_set_mask.h} | 77 ++++++++++--------- 2 files changed, 46 insertions(+), 45 deletions(-) rename esphome/core/{enum_bitmask.h => finite_set_mask.h} (56%) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 77c5a13a6f..61f12f559c 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -1,7 +1,7 @@ #pragma once #include -#include "esphome/core/enum_bitmask.h" +#include "esphome/core/finite_set_mask.h" namespace esphome { namespace light { @@ -127,8 +127,8 @@ constexpr ColorMode COLOR_MODE_LOOKUP[COLOR_MODE_BITMASK_SIZE] = { ColorMode::RGB_COLD_WARM_WHITE, // bit 9 }; -// Type alias for ColorMode bitmask using generic EnumBitmask template -using ColorModeMask = EnumBitmask; +// Type alias for ColorMode bitmask using generic FiniteSetMask template +using ColorModeMask = FiniteSetMask; // Number of ColorCapability enum values constexpr int COLOR_CAPABILITY_COUNT = 6; @@ -190,7 +190,7 @@ inline bool has_capability(const ColorModeMask &mask, ColorCapability capability // Template specializations for ColorMode must be in global namespace // // C++ requires template specializations to be declared in the same namespace as the -// original template. Since EnumBitmask is in the esphome namespace (not esphome::light), +// original template. Since FiniteSetMask is in the esphome namespace (not esphome::light), // we must provide these specializations at global scope with fully-qualified names. // // These specializations define how ColorMode enum values map to/from bit positions. @@ -198,7 +198,7 @@ inline bool has_capability(const ColorModeMask &mask, ColorCapability capability /// Map ColorMode enum values to bit positions (0-9) /// Bit positions follow the enum declaration order template<> -constexpr int esphome::EnumBitmask::enum_to_bit( +constexpr int esphome::FiniteSetMask::value_to_bit( esphome::light::ColorMode mode) { // Linear search through COLOR_MODE_LOOKUP array // Compiler optimizes this to efficient code since array is constexpr @@ -212,8 +212,8 @@ constexpr int esphome::EnumBitmask -inline esphome::light::ColorMode esphome::EnumBitmask::bit_to_enum(int bit) { +inline esphome::light::ColorMode esphome::FiniteSetMask< + esphome::light::ColorMode, esphome::light::COLOR_MODE_BITMASK_SIZE>::bit_to_value(int bit) { return (bit >= 0 && bit < esphome::light::COLOR_MODE_BITMASK_SIZE) ? esphome::light::COLOR_MODE_LOOKUP[bit] : esphome::light::ColorMode::UNKNOWN; } diff --git a/esphome/core/enum_bitmask.h b/esphome/core/finite_set_mask.h similarity index 56% rename from esphome/core/enum_bitmask.h rename to esphome/core/finite_set_mask.h index e1840a3ac0..e6e7564d4b 100644 --- a/esphome/core/enum_bitmask.h +++ b/esphome/core/finite_set_mask.h @@ -8,34 +8,35 @@ namespace esphome { -/// Generic bitmask for storing a set of enum values efficiently. -/// Replaces std::set to eliminate red-black tree overhead (~586 bytes per instantiation). +/// Generic bitmask for storing a finite set of discrete values efficiently. +/// Replaces std::set to eliminate red-black tree overhead (~586 bytes per instantiation). 
/// /// Template parameters: -/// EnumType: The enum type to store (must be uint8_t-based) +/// ValueType: The type to store (typically enum, but can be any discrete bounded type) /// MaxBits: Maximum number of bits needed (auto-selects uint8_t/uint16_t/uint32_t) /// /// Requirements: -/// - EnumType must be an enum with sequential values starting from 0 -/// - Specialization must provide enum_to_bit() and bit_to_enum() static methods -/// - MaxBits must be sufficient to hold all enum values +/// - ValueType must have a bounded discrete range that maps to bit positions +/// - Specialization must provide value_to_bit() and bit_to_value() static methods +/// - MaxBits must be sufficient to hold all possible values /// /// Example usage: -/// using ClimateModeMask = EnumBitmask; +/// using ClimateModeMask = FiniteSetMask; /// ClimateModeMask modes({CLIMATE_MODE_HEAT, CLIMATE_MODE_COOL}); /// if (modes.count(CLIMATE_MODE_HEAT)) { ... } /// for (auto mode : modes) { ... } // Iterate over set bits /// /// For complete usage examples with template specializations, see: -/// - esphome/components/light/color_mode.h (ColorMode example) +/// - esphome/components/light/color_mode.h (ColorMode enum example) /// /// Design notes: /// - Uses compile-time type selection for optimal size (uint8_t/uint16_t/uint32_t) -/// - Iterator converts bit positions to actual enum values during traversal +/// - Iterator converts bit positions to actual values during traversal /// - All operations are constexpr-compatible for compile-time initialization -/// - Drop-in replacement for std::set with simpler API +/// - Drop-in replacement for std::set with simpler API +/// - Despite the name, works with any discrete bounded type, not just enums /// -template class EnumBitmask { +template class FiniteSetMask { public: // Automatic bitmask type selection based on MaxBits // ≤8 bits: uint8_t, ≤16 bits: uint16_t, otherwise: uint32_t @@ -43,38 +44,38 @@ template class EnumBitmask { typename std::conditional<(MaxBits <= 8), uint8_t, typename std::conditional<(MaxBits <= 16), uint16_t, uint32_t>::type>::type; - constexpr EnumBitmask() = default; + constexpr FiniteSetMask() = default; /// Construct from initializer list: {VALUE1, VALUE2, ...} - constexpr EnumBitmask(std::initializer_list values) { + constexpr FiniteSetMask(std::initializer_list values) { for (auto value : values) { this->insert(value); } } - /// Add a single enum value to the set (std::set compatibility) - constexpr void insert(EnumType value) { this->mask_ |= (static_cast(1) << enum_to_bit(value)); } + /// Add a single value to the set (std::set compatibility) + constexpr void insert(ValueType value) { this->mask_ |= (static_cast(1) << value_to_bit(value)); } - /// Add multiple enum values from initializer list - constexpr void insert(std::initializer_list values) { + /// Add multiple values from initializer list + constexpr void insert(std::initializer_list values) { for (auto value : values) { this->insert(value); } } - /// Remove an enum value from the set (std::set compatibility) - constexpr void erase(EnumType value) { this->mask_ &= ~(static_cast(1) << enum_to_bit(value)); } + /// Remove a value from the set (std::set compatibility) + constexpr void erase(ValueType value) { this->mask_ &= ~(static_cast(1) << value_to_bit(value)); } /// Clear all values from the set constexpr void clear() { this->mask_ = 0; } - /// Check if the set contains a specific enum value (std::set compatibility) + /// Check if the set contains a specific value (std::set 
compatibility) /// Returns 1 if present, 0 if not (same as std::set for unique elements) - constexpr size_t count(EnumType value) const { - return (this->mask_ & (static_cast(1) << enum_to_bit(value))) != 0 ? 1 : 0; + constexpr size_t count(ValueType value) const { + return (this->mask_ & (static_cast(1) << value_to_bit(value))) != 0 ? 1 : 0; } - /// Count the number of enum values in the set + /// Count the number of values in the set constexpr size_t size() const { // Brian Kernighan's algorithm - efficient for sparse bitmasks // Typical case: 2-4 modes out of 10 possible @@ -91,21 +92,21 @@ template class EnumBitmask { constexpr bool empty() const { return this->mask_ == 0; } /// Iterator support for range-based for loops and API encoding - /// Iterates over set bits and converts bit positions to enum values + /// Iterates over set bits and converts bit positions to values /// Optimization: removes bits from mask as we iterate class Iterator { public: using iterator_category = std::forward_iterator_tag; - using value_type = EnumType; + using value_type = ValueType; using difference_type = std::ptrdiff_t; - using pointer = const EnumType *; - using reference = EnumType; + using pointer = const ValueType *; + using reference = ValueType; constexpr explicit Iterator(bitmask_t mask) : mask_(mask) {} - constexpr EnumType operator*() const { - // Return enum for the first set bit - return bit_to_enum(find_next_set_bit(mask_, 0)); + constexpr ValueType operator*() const { + // Return value for the first set bit + return bit_to_value(find_next_set_bit(mask_, 0)); } constexpr Iterator &operator++() { @@ -128,15 +129,15 @@ template class EnumBitmask { /// Get the raw bitmask value for optimized operations constexpr bitmask_t get_mask() const { return this->mask_; } - /// Check if a specific enum value is present in a raw bitmask + /// Check if a specific value is present in a raw bitmask /// Useful for checking intersection results without creating temporary objects - static constexpr bool mask_contains(bitmask_t mask, EnumType value) { - return (mask & (static_cast(1) << enum_to_bit(value))) != 0; + static constexpr bool mask_contains(bitmask_t mask, ValueType value) { + return (mask & (static_cast(1) << value_to_bit(value))) != 0; } - /// Get the first enum value from a raw bitmask + /// Get the first value from a raw bitmask /// Used for optimizing intersection logic (e.g., "pick first suitable mode") - static constexpr EnumType first_value_from_mask(bitmask_t mask) { return bit_to_enum(find_next_set_bit(mask, 0)); } + static constexpr ValueType first_value_from_mask(bitmask_t mask) { return bit_to_value(find_next_set_bit(mask, 0)); } /// Find the next set bit in a bitmask starting from a given position /// Returns the bit position, or MaxBits if no more bits are set @@ -150,9 +151,9 @@ template class EnumBitmask { protected: // Must be provided by template specialization - // These convert between enum values and bit positions (0, 1, 2, ...) - static constexpr int enum_to_bit(EnumType value); - static EnumType bit_to_enum(int bit); // Not constexpr: array indexing with runtime bounds checking + // These convert between values and bit positions (0, 1, 2, ...) + static constexpr int value_to_bit(ValueType value); + static ValueType bit_to_value(int bit); // Not constexpr: array indexing with runtime bounds checking bitmask_t mask_{0}; }; From 753662feaab5b3df9c98dcf3eb6e9cdea4044964 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 08:47:18 -1000 Subject: [PATCH 296/336] preen --- esphome/components/light/color_mode.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 61f12f559c..71b79ea506 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -105,7 +105,7 @@ constexpr ColorModeHelper operator|(ColorModeHelper lhs, ColorMode rhs) { return static_cast(static_cast(lhs) | static_cast(rhs)); } -// Type alias for raw color mode bitmask values (retained for compatibility) +// Type alias for raw color mode bitmask values using color_mode_bitmask_t = uint16_t; // Number of ColorMode enum values From 02a8024e9499ae5404f6c34ce95fc025822ec7c7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 08:54:21 -1000 Subject: [PATCH 297/336] Update esphome/components/light/color_mode.h Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- esphome/components/light/color_mode.h | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 71b79ea506..1f64b22e82 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -159,8 +159,15 @@ constexpr uint16_t CAPABILITY_BITMASKS[] = { compute_capability_bitmask(ColorCapability::RGB), // 1 << 5 }; -/// Convert a power-of-2 ColorCapability value to an array index -/// ColorCapability values: 1, 2, 4, 8, 16, 32 -> array indices: 0, 1, 2, 3, 4, 5 +/** + * @brief Helper function to convert a power-of-2 ColorCapability value to an array index for CAPABILITY_BITMASKS lookup. + * + * This function maps ColorCapability values (1, 2, 4, 8, 16, 32) to array indices (0, 1, 2, 3, 4, 5). + * Used to index into the CAPABILITY_BITMASKS lookup table. + * + * @param capability A ColorCapability enum value (must be a power of 2). + * @return The corresponding array index (0-based). + */ inline int capability_to_index(ColorCapability capability) { uint8_t cap_val = static_cast(capability); #if defined(__GNUC__) || defined(__clang__) From a335aa0713b5d8905c4baa416639fbc0b51491fa Mon Sep 17 00:00:00 2001 From: "pre-commit-ci-lite[bot]" <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com> Date: Wed, 22 Oct 2025 18:56:11 +0000 Subject: [PATCH 298/336] [pre-commit.ci lite] apply automatic fixes --- esphome/components/light/color_mode.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 1f64b22e82..963c36c2a6 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -160,7 +160,8 @@ constexpr uint16_t CAPABILITY_BITMASKS[] = { }; /** - * @brief Helper function to convert a power-of-2 ColorCapability value to an array index for CAPABILITY_BITMASKS lookup. + * @brief Helper function to convert a power-of-2 ColorCapability value to an array index for CAPABILITY_BITMASKS + * lookup. * * This function maps ColorCapability values (1, 2, 4, 8, 16, 32) to array indices (0, 1, 2, 3, 4, 5). * Used to index into the CAPABILITY_BITMASKS lookup table. From 1c67a619459c58fd855771defec3389ae41603f3 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 09:10:24 -1000 Subject: [PATCH 299/336] [ci] Fix WiFi testing mode validation and component splitter for variant-only tests (#11481) --- esphome/components/wifi/__init__.py | 14 +++++++++----- script/split_components_for_ci.py | 9 +++++++-- 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/esphome/components/wifi/__init__.py b/esphome/components/wifi/__init__.py index 29d33bfc76..ba488728b7 100644 --- a/esphome/components/wifi/__init__.py +++ b/esphome/components/wifi/__init__.py @@ -213,11 +213,15 @@ def _validate(config): if CONF_EAP in config: network[CONF_EAP] = config.pop(CONF_EAP) if CONF_NETWORKS in config: - raise cv.Invalid( - "You cannot use the 'ssid:' option together with 'networks:'. Please " - "copy your network into the 'networks:' key" - ) - config[CONF_NETWORKS] = cv.ensure_list(WIFI_NETWORK_STA)(network) + # In testing mode, merged component tests may have both ssid and networks + # Just use the networks list and ignore the single ssid + if not CORE.testing_mode: + raise cv.Invalid( + "You cannot use the 'ssid:' option together with 'networks:'. Please " + "copy your network into the 'networks:' key" + ) + else: + config[CONF_NETWORKS] = cv.ensure_list(WIFI_NETWORK_STA)(network) if (CONF_NETWORKS not in config) and (CONF_AP not in config): config = config.copy() diff --git a/script/split_components_for_ci.py b/script/split_components_for_ci.py index c58dfd218f..87da540d43 100755 --- a/script/split_components_for_ci.py +++ b/script/split_components_for_ci.py @@ -118,8 +118,13 @@ def create_intelligent_batches( continue # Get signature from any platform (they should all have the same buses) - # Components not in component_buses were filtered out by has_test_files check - comp_platforms = component_buses[component] + # Components not in component_buses may only have variant-specific tests + comp_platforms = component_buses.get(component) + if not comp_platforms: + # Component has tests but no analyzable base config - treat as no buses + signature_groups[(ALL_PLATFORMS, NO_BUSES_SIGNATURE)].append(component) + continue + for platform, buses in comp_platforms.items(): if buses: signature = create_grouping_signature({platform: buses}, platform) From bc7cc066a5630776c6b61b66a5385cd6781b8de7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 09:54:47 -1000 Subject: [PATCH 300/336] backmerge --- esphome/core/finite_set_mask.h | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/esphome/core/finite_set_mask.h b/esphome/core/finite_set_mask.h index e6e7564d4b..ab2454508f 100644 --- a/esphome/core/finite_set_mask.h +++ b/esphome/core/finite_set_mask.h @@ -17,17 +17,20 @@ namespace esphome { /// /// Requirements: /// - ValueType must have a bounded discrete range that maps to bit positions -/// - Specialization must provide value_to_bit() and bit_to_value() static methods +/// - For 1:1 mappings (contiguous enums starting at 0), no specialization needed +/// - For custom mappings (like ColorMode), specialize value_to_bit() and/or bit_to_value() /// - MaxBits must be sufficient to hold all possible values /// -/// Example usage: -/// using ClimateModeMask = FiniteSetMask; +/// Example usage (1:1 mapping - climate enums): +/// // For enums with contiguous values starting at 0, no specialization needed! +/// using ClimateModeMask = FiniteSetMask; /// ClimateModeMask modes({CLIMATE_MODE_HEAT, CLIMATE_MODE_COOL}); /// if (modes.count(CLIMATE_MODE_HEAT)) { ... 
} /// for (auto mode : modes) { ... } // Iterate over set bits /// -/// For complete usage examples with template specializations, see: -/// - esphome/components/light/color_mode.h (ColorMode enum example) +/// Example usage (custom mapping - ColorMode): +/// // For non-contiguous enums or custom mappings, specialize value_to_bit() and/or bit_to_value() +/// // See esphome/components/light/color_mode.h for complete example /// /// Design notes: /// - Uses compile-time type selection for optimal size (uint8_t/uint16_t/uint32_t) @@ -150,10 +153,11 @@ template class FiniteSetMask { } protected: - // Must be provided by template specialization - // These convert between values and bit positions (0, 1, 2, ...) - static constexpr int value_to_bit(ValueType value); - static ValueType bit_to_value(int bit); // Not constexpr: array indexing with runtime bounds checking + // Default implementations for 1:1 mapping (enum value = bit position) + // For enums with contiguous values starting at 0, these defaults work as-is. + // If you need custom mapping (like ColorMode), provide specializations. + static constexpr int value_to_bit(ValueType value) { return static_cast(value); } + static constexpr ValueType bit_to_value(int bit) { return static_cast(bit); } bitmask_t mask_{0}; }; From 22070ac78fce3c12d97b536cf650a5d8b098abe4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 10:07:16 -1000 Subject: [PATCH 301/336] review feedback --- esphome/components/light/color_mode.h | 64 ++++++++++-------------- esphome/core/finite_set_mask.h | 70 +++++++++++++++------------ 2 files changed, 63 insertions(+), 71 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index 963c36c2a6..f5f891d2df 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -108,13 +108,9 @@ constexpr ColorModeHelper operator|(ColorModeHelper lhs, ColorMode rhs) { // Type alias for raw color mode bitmask values using color_mode_bitmask_t = uint16_t; -// Number of ColorMode enum values -constexpr int COLOR_MODE_BITMASK_SIZE = 10; - -// Shared lookup table for ColorMode bit mapping +// Lookup table for ColorMode bit mapping // This array defines the canonical order of color modes (bit 0-9) -// Declared early so it can be used by constexpr functions -constexpr ColorMode COLOR_MODE_LOOKUP[COLOR_MODE_BITMASK_SIZE] = { +constexpr ColorMode COLOR_MODE_LOOKUP[] = { ColorMode::UNKNOWN, // bit 0 ColorMode::ON_OFF, // bit 1 ColorMode::BRIGHTNESS, // bit 2 @@ -127,8 +123,29 @@ constexpr ColorMode COLOR_MODE_LOOKUP[COLOR_MODE_BITMASK_SIZE] = { ColorMode::RGB_COLD_WARM_WHITE, // bit 9 }; -// Type alias for ColorMode bitmask using generic FiniteSetMask template -using ColorModeMask = FiniteSetMask; +/// Bit mapping policy for ColorMode +/// Uses lookup table for non-contiguous enum values +struct ColorModeBitPolicy { + using mask_t = uint16_t; // 10 bits requires uint16_t + static constexpr int max_bits = sizeof(COLOR_MODE_LOOKUP) / sizeof(COLOR_MODE_LOOKUP[0]); + + static constexpr unsigned to_bit(ColorMode mode) { + // Linear search through lookup table + // Compiler optimizes this to efficient code since array is constexpr + for (int i = 0; i < max_bits; ++i) { + if (COLOR_MODE_LOOKUP[i] == mode) + return i; + } + return 0; + } + + static constexpr ColorMode from_bit(unsigned bit) { + return (bit < max_bits) ? 
COLOR_MODE_LOOKUP[bit] : ColorMode::UNKNOWN; + } +}; + +// Type alias for ColorMode bitmask using policy-based design +using ColorModeMask = FiniteSetMask; // Number of ColorCapability enum values constexpr int COLOR_CAPABILITY_COUNT = 6; @@ -194,34 +211,3 @@ inline bool has_capability(const ColorModeMask &mask, ColorCapability capability } // namespace light } // namespace esphome - -// Template specializations for ColorMode must be in global namespace -// -// C++ requires template specializations to be declared in the same namespace as the -// original template. Since FiniteSetMask is in the esphome namespace (not esphome::light), -// we must provide these specializations at global scope with fully-qualified names. -// -// These specializations define how ColorMode enum values map to/from bit positions. - -/// Map ColorMode enum values to bit positions (0-9) -/// Bit positions follow the enum declaration order -template<> -constexpr int esphome::FiniteSetMask::value_to_bit( - esphome::light::ColorMode mode) { - // Linear search through COLOR_MODE_LOOKUP array - // Compiler optimizes this to efficient code since array is constexpr - for (int i = 0; i < esphome::light::COLOR_MODE_BITMASK_SIZE; ++i) { - if (esphome::light::COLOR_MODE_LOOKUP[i] == mode) - return i; - } - return 0; -} - -/// Map bit positions (0-9) to ColorMode enum values -/// Bit positions follow the enum declaration order -template<> -inline esphome::light::ColorMode esphome::FiniteSetMask< - esphome::light::ColorMode, esphome::light::COLOR_MODE_BITMASK_SIZE>::bit_to_value(int bit) { - return (bit >= 0 && bit < esphome::light::COLOR_MODE_BITMASK_SIZE) ? esphome::light::COLOR_MODE_LOOKUP[bit] - : esphome::light::ColorMode::UNKNOWN; -} diff --git a/esphome/core/finite_set_mask.h b/esphome/core/finite_set_mask.h index ab2454508f..d3f0b52a71 100644 --- a/esphome/core/finite_set_mask.h +++ b/esphome/core/finite_set_mask.h @@ -8,44 +8,54 @@ namespace esphome { +/// Default bit mapping policy for contiguous enums starting at 0 +/// Provides 1:1 mapping where enum value equals bit position +template struct DefaultBitPolicy { + // Automatic bitmask type selection based on MaxBits + // ≤8 bits: uint8_t, ≤16 bits: uint16_t, otherwise: uint32_t + using mask_t = typename std::conditional<(MaxBits <= 8), uint8_t, + typename std::conditional<(MaxBits <= 16), uint16_t, uint32_t>::type>::type; + + static constexpr int max_bits = MaxBits; + + static constexpr unsigned to_bit(ValueType value) { return static_cast(value); } + + static constexpr ValueType from_bit(unsigned bit) { return static_cast(bit); } +}; + /// Generic bitmask for storing a finite set of discrete values efficiently. /// Replaces std::set to eliminate red-black tree overhead (~586 bytes per instantiation). 
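// ---- Editor's illustrative example (not part of the patch) ----
// A concrete sketch of the policy-based API described in this comment block. DemoMode
// and the explicitly spelled-out DefaultBitPolicy arguments are assumptions made for
// illustration; FiniteSetMask, DefaultBitPolicy, insert(), count(), erase() and the
// iterator come from the diff itself.
#include <cstdint>
#include "esphome/core/finite_set_mask.h"

enum class DemoMode : uint8_t { OFF = 0, ECO = 1, COMFORT = 2, BOOST = 3 };
// Contiguous values starting at 0, so the default 1:1 bit mapping applies.
using DemoModeMask = esphome::FiniteSetMask<DemoMode, esphome::DefaultBitPolicy<DemoMode, 4>>;

void demo_mode_mask_example() {
  DemoModeMask modes{DemoMode::ECO, DemoMode::BOOST};
  modes.insert(DemoMode::COMFORT);
  if (modes.count(DemoMode::BOOST)) { /* supported */ }
  modes.erase(DemoMode::ECO);
  for (auto mode : modes) { (void) mode; }  // iterates set bits, lowest first
}
// ---- end of editor's example ----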
/// /// Template parameters: /// ValueType: The type to store (typically enum, but can be any discrete bounded type) -/// MaxBits: Maximum number of bits needed (auto-selects uint8_t/uint16_t/uint32_t) +/// BitPolicy: Policy class defining bit mapping and mask type (defaults to DefaultBitPolicy) /// -/// Requirements: -/// - ValueType must have a bounded discrete range that maps to bit positions -/// - For 1:1 mappings (contiguous enums starting at 0), no specialization needed -/// - For custom mappings (like ColorMode), specialize value_to_bit() and/or bit_to_value() -/// - MaxBits must be sufficient to hold all possible values +/// BitPolicy requirements: +/// - using mask_t = // Bitmask storage type +/// - static constexpr int max_bits // Maximum number of bits +/// - static constexpr unsigned to_bit(ValueType) // Convert value to bit position +/// - static constexpr ValueType from_bit(unsigned) // Convert bit position to value /// /// Example usage (1:1 mapping - climate enums): -/// // For enums with contiguous values starting at 0, no specialization needed! -/// using ClimateModeMask = FiniteSetMask; +/// // For contiguous enums starting at 0, use DefaultBitPolicy +/// using ClimateModeMask = FiniteSetMask>; /// ClimateModeMask modes({CLIMATE_MODE_HEAT, CLIMATE_MODE_COOL}); /// if (modes.count(CLIMATE_MODE_HEAT)) { ... } -/// for (auto mode : modes) { ... } // Iterate over set bits +/// for (auto mode : modes) { ... } /// /// Example usage (custom mapping - ColorMode): -/// // For non-contiguous enums or custom mappings, specialize value_to_bit() and/or bit_to_value() +/// // For custom mappings, define a custom BitPolicy /// // See esphome/components/light/color_mode.h for complete example /// /// Design notes: -/// - Uses compile-time type selection for optimal size (uint8_t/uint16_t/uint32_t) +/// - Policy-based design allows custom bit mappings without template specialization /// - Iterator converts bit positions to actual values during traversal /// - All operations are constexpr-compatible for compile-time initialization /// - Drop-in replacement for std::set with simpler API -/// - Despite the name, works with any discrete bounded type, not just enums /// -template class FiniteSetMask { +template> class FiniteSetMask { public: - // Automatic bitmask type selection based on MaxBits - // ≤8 bits: uint8_t, ≤16 bits: uint16_t, otherwise: uint32_t - using bitmask_t = - typename std::conditional<(MaxBits <= 8), uint8_t, - typename std::conditional<(MaxBits <= 16), uint16_t, uint32_t>::type>::type; + using bitmask_t = typename BitPolicy::mask_t; constexpr FiniteSetMask() = default; @@ -57,7 +67,7 @@ template class FiniteSetMask { } /// Add a single value to the set (std::set compatibility) - constexpr void insert(ValueType value) { this->mask_ |= (static_cast(1) << value_to_bit(value)); } + constexpr void insert(ValueType value) { this->mask_ |= (static_cast(1) << BitPolicy::to_bit(value)); } /// Add multiple values from initializer list constexpr void insert(std::initializer_list values) { @@ -67,7 +77,7 @@ template class FiniteSetMask { } /// Remove a value from the set (std::set compatibility) - constexpr void erase(ValueType value) { this->mask_ &= ~(static_cast(1) << value_to_bit(value)); } + constexpr void erase(ValueType value) { this->mask_ &= ~(static_cast(1) << BitPolicy::to_bit(value)); } /// Clear all values from the set constexpr void clear() { this->mask_ = 0; } @@ -75,7 +85,7 @@ template class FiniteSetMask { /// Check if the set contains a specific value (std::set 
compatibility) /// Returns 1 if present, 0 if not (same as std::set for unique elements) constexpr size_t count(ValueType value) const { - return (this->mask_ & (static_cast(1) << value_to_bit(value))) != 0 ? 1 : 0; + return (this->mask_ & (static_cast(1) << BitPolicy::to_bit(value))) != 0 ? 1 : 0; } /// Count the number of values in the set @@ -109,7 +119,7 @@ template class FiniteSetMask { constexpr ValueType operator*() const { // Return value for the first set bit - return bit_to_value(find_next_set_bit(mask_, 0)); + return BitPolicy::from_bit(find_next_set_bit(mask_, 0)); } constexpr Iterator &operator++() { @@ -135,30 +145,26 @@ template class FiniteSetMask { /// Check if a specific value is present in a raw bitmask /// Useful for checking intersection results without creating temporary objects static constexpr bool mask_contains(bitmask_t mask, ValueType value) { - return (mask & (static_cast(1) << value_to_bit(value))) != 0; + return (mask & (static_cast(1) << BitPolicy::to_bit(value))) != 0; } /// Get the first value from a raw bitmask /// Used for optimizing intersection logic (e.g., "pick first suitable mode") - static constexpr ValueType first_value_from_mask(bitmask_t mask) { return bit_to_value(find_next_set_bit(mask, 0)); } + static constexpr ValueType first_value_from_mask(bitmask_t mask) { + return BitPolicy::from_bit(find_next_set_bit(mask, 0)); + } /// Find the next set bit in a bitmask starting from a given position - /// Returns the bit position, or MaxBits if no more bits are set + /// Returns the bit position, or max_bits if no more bits are set static constexpr int find_next_set_bit(bitmask_t mask, int start_bit) { int bit = start_bit; - while (bit < MaxBits && !(mask & (static_cast(1) << bit))) { + while (bit < BitPolicy::max_bits && !(mask & (static_cast(1) << bit))) { ++bit; } return bit; } protected: - // Default implementations for 1:1 mapping (enum value = bit position) - // For enums with contiguous values starting at 0, these defaults work as-is. - // If you need custom mapping (like ColorMode), provide specializations. - static constexpr int value_to_bit(ValueType value) { return static_cast(value); } - static constexpr ValueType bit_to_value(int bit) { return static_cast(bit); } - bitmask_t mask_{0}; }; From 1bebdb2c00539bea061f5eeb04e2be7665cf4c7c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 10:12:58 -1000 Subject: [PATCH 302/336] fix refactoring error --- esphome/components/light/color_mode.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index f5f891d2df..fde06ef38c 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -156,7 +156,8 @@ constexpr uint16_t compute_capability_bitmask(ColorCapability capability) { uint8_t cap_bit = static_cast(capability); // Check each ColorMode to see if it has this capability - for (int bit = 0; bit < COLOR_MODE_BITMASK_SIZE; ++bit) { + constexpr int color_mode_count = sizeof(COLOR_MODE_LOOKUP) / sizeof(COLOR_MODE_LOOKUP[0]); + for (int bit = 0; bit < color_mode_count; ++bit) { uint8_t mode_val = static_cast(COLOR_MODE_LOOKUP[bit]); if ((mode_val & cap_bit) != 0) { mask |= (1 << bit); From 3dfb2ba70e2de4e52698df464c4ad0cde31767ea Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 10:18:26 -1000 Subject: [PATCH 303/336] tidy --- esphome/components/light/color_mode.h | 6 +++--- esphome/core/finite_set_mask.h | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index fde06ef38c..aa3448c145 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -127,12 +127,12 @@ constexpr ColorMode COLOR_MODE_LOOKUP[] = { /// Uses lookup table for non-contiguous enum values struct ColorModeBitPolicy { using mask_t = uint16_t; // 10 bits requires uint16_t - static constexpr int max_bits = sizeof(COLOR_MODE_LOOKUP) / sizeof(COLOR_MODE_LOOKUP[0]); + static constexpr int MAX_BITS = sizeof(COLOR_MODE_LOOKUP) / sizeof(COLOR_MODE_LOOKUP[0]); static constexpr unsigned to_bit(ColorMode mode) { // Linear search through lookup table // Compiler optimizes this to efficient code since array is constexpr - for (int i = 0; i < max_bits; ++i) { + for (int i = 0; i < MAX_BITS; ++i) { if (COLOR_MODE_LOOKUP[i] == mode) return i; } @@ -140,7 +140,7 @@ struct ColorModeBitPolicy { } static constexpr ColorMode from_bit(unsigned bit) { - return (bit < max_bits) ? COLOR_MODE_LOOKUP[bit] : ColorMode::UNKNOWN; + return (bit < MAX_BITS) ? COLOR_MODE_LOOKUP[bit] : ColorMode::UNKNOWN; } }; diff --git a/esphome/core/finite_set_mask.h b/esphome/core/finite_set_mask.h index d3f0b52a71..f9cd0377c7 100644 --- a/esphome/core/finite_set_mask.h +++ b/esphome/core/finite_set_mask.h @@ -16,7 +16,7 @@ template struct DefaultBitPolicy { using mask_t = typename std::conditional<(MaxBits <= 8), uint8_t, typename std::conditional<(MaxBits <= 16), uint16_t, uint32_t>::type>::type; - static constexpr int max_bits = MaxBits; + static constexpr int MAX_BITS = MaxBits; static constexpr unsigned to_bit(ValueType value) { return static_cast(value); } @@ -32,7 +32,7 @@ template struct DefaultBitPolicy { /// /// BitPolicy requirements: /// - using mask_t = // Bitmask storage type -/// - static constexpr int max_bits // Maximum number of bits +/// - static constexpr int MAX_BITS // Maximum number of bits /// - static constexpr unsigned to_bit(ValueType) // Convert value to bit position /// - static constexpr ValueType from_bit(unsigned) // Convert bit position to value /// @@ -155,10 +155,10 @@ template(1) << bit))) { + while (bit < BitPolicy::MAX_BITS && !(mask & (static_cast(1) << bit))) { ++bit; } return bit; From f559fad4fccb5928b7e041a8ecb97b7ef2f93289 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 11:03:32 -1000 Subject: [PATCH 304/336] [fan] Use FixedVector for preset modes, preserve config order (breaking) --- esphome/components/api/api.proto | 2 +- esphome/components/api/api_pb2.h | 2 +- esphome/components/fan/fan.cpp | 31 +++++++++++++------ esphome/components/fan/fan.h | 2 +- esphome/components/fan/fan_traits.h | 23 +++++++++----- .../components/hbridge/fan/hbridge_fan.cpp | 3 +- esphome/components/hbridge/fan/hbridge_fan.h | 9 +++--- esphome/components/speed/fan/speed_fan.cpp | 3 +- esphome/components/speed/fan/speed_fan.h | 9 +++--- .../components/template/fan/template_fan.cpp | 3 +- .../components/template/fan/template_fan.h | 9 +++--- tests/components/fan/common.yaml | 11 +++++++ tests/components/fan/test.esp8266-ard.yaml | 1 + 13 files changed, 70 insertions(+), 38 deletions(-) create mode 100644 tests/components/fan/common.yaml create mode 100644 tests/components/fan/test.esp8266-ard.yaml diff --git a/esphome/components/api/api.proto b/esphome/components/api/api.proto index d202486cfa..34be6e4aa2 100644 --- a/esphome/components/api/api.proto +++ b/esphome/components/api/api.proto @@ -425,7 +425,7 @@ message ListEntitiesFanResponse { bool disabled_by_default = 9; string icon = 10 [(field_ifdef) = "USE_ENTITY_ICON"]; EntityCategory entity_category = 11; - repeated string supported_preset_modes = 12 [(container_pointer) = "std::set"]; + repeated string supported_preset_modes = 12 [(container_pointer) = "FixedVector"]; uint32 device_id = 13 [(field_ifdef) = "USE_DEVICES"]; } // Deprecated in API version 1.6 - only used in deprecated fields diff --git a/esphome/components/api/api_pb2.h b/esphome/components/api/api_pb2.h index ed49498176..647dd47b89 100644 --- a/esphome/components/api/api_pb2.h +++ b/esphome/components/api/api_pb2.h @@ -725,7 +725,7 @@ class ListEntitiesFanResponse final : public InfoResponseProtoMessage { bool supports_speed{false}; bool supports_direction{false}; int32_t supported_speed_count{0}; - const std::set *supported_preset_modes{}; + const FixedVector *supported_preset_modes{}; void encode(ProtoWriteBuffer buffer) const override; void calculate_size(ProtoSize &size) const override; #ifdef HAS_PROTO_MESSAGE_DUMP diff --git a/esphome/components/fan/fan.cpp b/esphome/components/fan/fan.cpp index 26065ed644..839b0d08cc 100644 --- a/esphome/components/fan/fan.cpp +++ b/esphome/components/fan/fan.cpp @@ -39,7 +39,7 @@ void FanCall::perform() { } void FanCall::validate_() { - auto traits = this->parent_.get_traits(); + const auto &traits = this->parent_.get_traits(); if (this->speed_.has_value()) { this->speed_ = clamp(*this->speed_, 1, traits.supported_speed_count()); @@ -51,7 +51,15 @@ void FanCall::validate_() { if (!this->preset_mode_.empty()) { const auto &preset_modes = traits.supported_preset_modes(); - if (preset_modes.find(this->preset_mode_) == preset_modes.end()) { + // Linear search is efficient for small preset mode lists (typically 2-5 items) + bool found = false; + for (const auto &mode : preset_modes) { + if (mode == this->preset_mode_) { + found = true; + break; + } + } + if (!found) { ESP_LOGW(TAG, "%s: Preset mode '%s' not supported", this->parent_.get_name().c_str(), this->preset_mode_.c_str()); this->preset_mode_.clear(); } @@ -96,7 +104,7 @@ FanCall FanRestoreState::to_call(Fan &fan) { // Use stored preset index to get preset name const auto &preset_modes = fan.get_traits().supported_preset_modes(); if (this->preset_mode < preset_modes.size()) { - call.set_preset_mode(*std::next(preset_modes.begin(), 
this->preset_mode)); + call.set_preset_mode(preset_modes[this->preset_mode]); } } return call; @@ -111,7 +119,7 @@ void FanRestoreState::apply(Fan &fan) { // Use stored preset index to get preset name const auto &preset_modes = fan.get_traits().supported_preset_modes(); if (this->preset_mode < preset_modes.size()) { - fan.preset_mode = *std::next(preset_modes.begin(), this->preset_mode); + fan.preset_mode = preset_modes[this->preset_mode]; } } fan.publish_state(); @@ -124,7 +132,7 @@ FanCall Fan::make_call() { return FanCall(*this); } void Fan::add_on_state_callback(std::function &&callback) { this->state_callback_.add(std::move(callback)); } void Fan::publish_state() { - auto traits = this->get_traits(); + const auto &traits = this->get_traits(); ESP_LOGD(TAG, "'%s' - Sending state:", this->name_.c_str()); ESP_LOGD(TAG, " State: %s", ONOFF(this->state)); @@ -190,17 +198,20 @@ void Fan::save_state_() { if (this->get_traits().supports_preset_modes() && !this->preset_mode.empty()) { const auto &preset_modes = this->get_traits().supported_preset_modes(); - // Store index of current preset mode - auto preset_iterator = preset_modes.find(this->preset_mode); - if (preset_iterator != preset_modes.end()) - state.preset_mode = std::distance(preset_modes.begin(), preset_iterator); + // Store index of current preset mode - linear search is efficient for small lists + for (size_t i = 0; i < preset_modes.size(); i++) { + if (preset_modes[i] == this->preset_mode) { + state.preset_mode = i; + break; + } + } } this->rtc_.save(&state); } void Fan::dump_traits_(const char *tag, const char *prefix) { - auto traits = this->get_traits(); + const auto &traits = this->get_traits(); if (traits.supports_speed()) { ESP_LOGCONFIG(tag, diff --git a/esphome/components/fan/fan.h b/esphome/components/fan/fan.h index b74187eb4a..901181903a 100644 --- a/esphome/components/fan/fan.h +++ b/esphome/components/fan/fan.h @@ -127,7 +127,7 @@ class Fan : public EntityBase { void publish_state(); - virtual FanTraits get_traits() = 0; + virtual const FanTraits &get_traits() = 0; /// Set the restore mode of this fan. void set_restore_mode(FanRestoreMode restore_mode) { this->restore_mode_ = restore_mode; } diff --git a/esphome/components/fan/fan_traits.h b/esphome/components/fan/fan_traits.h index 48509e5705..e0b64aa0fa 100644 --- a/esphome/components/fan/fan_traits.h +++ b/esphome/components/fan/fan_traits.h @@ -1,5 +1,5 @@ -#include #include +#include "esphome/core/helpers.h" #pragma once @@ -36,9 +36,18 @@ class FanTraits { /// Set whether this fan supports changing direction void set_direction(bool direction) { this->direction_ = direction; } /// Return the preset modes supported by the fan. - std::set supported_preset_modes() const { return this->preset_modes_; } - /// Set the preset modes supported by the fan. - void set_supported_preset_modes(const std::set &preset_modes) { this->preset_modes_ = preset_modes; } + const FixedVector &supported_preset_modes() const { return this->preset_modes_; } + /// Set the preset modes supported by the fan (from initializer list). + void set_supported_preset_modes(const std::initializer_list &preset_modes) { + this->preset_modes_ = preset_modes; + } + /// Set the preset modes supported by the fan (from FixedVector). 
+ template void set_supported_preset_modes(const T &preset_modes) { + this->preset_modes_.init(preset_modes.size()); + for (const auto &mode : preset_modes) { + this->preset_modes_.push_back(mode); + } + } /// Return if preset modes are supported bool supports_preset_modes() const { return !this->preset_modes_.empty(); } @@ -46,17 +55,17 @@ class FanTraits { #ifdef USE_API // The API connection is a friend class to access internal methods friend class api::APIConnection; - // This method returns a reference to the internal preset modes set. + // This method returns a reference to the internal preset modes. // It is used by the API to avoid copying data when encoding messages. // Warning: Do not use this method outside of the API connection code. // It returns a reference to internal data that can be invalidated. - const std::set &supported_preset_modes_for_api_() const { return this->preset_modes_; } + const FixedVector &supported_preset_modes_for_api_() const { return this->preset_modes_; } #endif bool oscillation_{false}; bool speed_{false}; bool direction_{false}; int speed_count_{}; - std::set preset_modes_{}; + FixedVector preset_modes_{}; }; } // namespace fan diff --git a/esphome/components/hbridge/fan/hbridge_fan.cpp b/esphome/components/hbridge/fan/hbridge_fan.cpp index 605a9d4ef3..c059783b1e 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.cpp +++ b/esphome/components/hbridge/fan/hbridge_fan.cpp @@ -36,7 +36,8 @@ void HBridgeFan::setup() { // Construct traits this->traits_ = fan::FanTraits(this->oscillating_ != nullptr, true, true, this->speed_count_); - this->traits_.set_supported_preset_modes(this->preset_modes_); + if (!this->preset_modes_.empty()) + this->traits_.set_supported_preset_modes(this->preset_modes_); } void HBridgeFan::dump_config() { diff --git a/esphome/components/hbridge/fan/hbridge_fan.h b/esphome/components/hbridge/fan/hbridge_fan.h index 4234fccae3..68458d7922 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.h +++ b/esphome/components/hbridge/fan/hbridge_fan.h @@ -1,8 +1,7 @@ #pragma once -#include - #include "esphome/core/automation.h" +#include "esphome/core/helpers.h" #include "esphome/components/output/binary_output.h" #include "esphome/components/output/float_output.h" #include "esphome/components/fan/fan.h" @@ -22,11 +21,11 @@ class HBridgeFan : public Component, public fan::Fan { void set_pin_a(output::FloatOutput *pin_a) { pin_a_ = pin_a; } void set_pin_b(output::FloatOutput *pin_b) { pin_b_ = pin_b; } void set_enable_pin(output::FloatOutput *enable) { enable_ = enable; } - void set_preset_modes(const std::set &presets) { preset_modes_ = presets; } + void set_preset_modes(const std::initializer_list &presets) { preset_modes_ = presets; } void setup() override; void dump_config() override; - fan::FanTraits get_traits() override { return this->traits_; } + const fan::FanTraits &get_traits() override { return this->traits_; } fan::FanCall brake(); @@ -38,7 +37,7 @@ class HBridgeFan : public Component, public fan::Fan { int speed_count_{}; DecayMode decay_mode_{DECAY_MODE_SLOW}; fan::FanTraits traits_; - std::set preset_modes_{}; + FixedVector preset_modes_{}; void control(const fan::FanCall &call) override; void write_state_(); diff --git a/esphome/components/speed/fan/speed_fan.cpp b/esphome/components/speed/fan/speed_fan.cpp index 57bd795416..9205d3592b 100644 --- a/esphome/components/speed/fan/speed_fan.cpp +++ b/esphome/components/speed/fan/speed_fan.cpp @@ -15,7 +15,8 @@ void SpeedFan::setup() { // Construct traits this->traits_ = 
fan::FanTraits(this->oscillating_ != nullptr, true, this->direction_ != nullptr, this->speed_count_); - this->traits_.set_supported_preset_modes(this->preset_modes_); + if (!this->preset_modes_.empty()) + this->traits_.set_supported_preset_modes(this->preset_modes_); } void SpeedFan::dump_config() { LOG_FAN("", "Speed Fan", this); } diff --git a/esphome/components/speed/fan/speed_fan.h b/esphome/components/speed/fan/speed_fan.h index 6537bce3f6..60c2267b04 100644 --- a/esphome/components/speed/fan/speed_fan.h +++ b/esphome/components/speed/fan/speed_fan.h @@ -1,8 +1,7 @@ #pragma once -#include - #include "esphome/core/component.h" +#include "esphome/core/helpers.h" #include "esphome/components/output/binary_output.h" #include "esphome/components/output/float_output.h" #include "esphome/components/fan/fan.h" @@ -18,8 +17,8 @@ class SpeedFan : public Component, public fan::Fan { void set_output(output::FloatOutput *output) { this->output_ = output; } void set_oscillating(output::BinaryOutput *oscillating) { this->oscillating_ = oscillating; } void set_direction(output::BinaryOutput *direction) { this->direction_ = direction; } - void set_preset_modes(const std::set &presets) { this->preset_modes_ = presets; } - fan::FanTraits get_traits() override { return this->traits_; } + void set_preset_modes(const std::initializer_list &presets) { this->preset_modes_ = presets; } + const fan::FanTraits &get_traits() override { return this->traits_; } protected: void control(const fan::FanCall &call) override; @@ -30,7 +29,7 @@ class SpeedFan : public Component, public fan::Fan { output::BinaryOutput *direction_{nullptr}; int speed_count_{}; fan::FanTraits traits_; - std::set preset_modes_{}; + FixedVector preset_modes_{}; }; } // namespace speed diff --git a/esphome/components/template/fan/template_fan.cpp b/esphome/components/template/fan/template_fan.cpp index 5f4a2ae8f7..477e2c4981 100644 --- a/esphome/components/template/fan/template_fan.cpp +++ b/esphome/components/template/fan/template_fan.cpp @@ -15,7 +15,8 @@ void TemplateFan::setup() { // Construct traits this->traits_ = fan::FanTraits(this->has_oscillating_, this->speed_count_ > 0, this->has_direction_, this->speed_count_); - this->traits_.set_supported_preset_modes(this->preset_modes_); + if (!this->preset_modes_.empty()) + this->traits_.set_supported_preset_modes(this->preset_modes_); } void TemplateFan::dump_config() { LOG_FAN("", "Template Fan", this); } diff --git a/esphome/components/template/fan/template_fan.h b/esphome/components/template/fan/template_fan.h index 7f5305ca48..5b175b21a4 100644 --- a/esphome/components/template/fan/template_fan.h +++ b/esphome/components/template/fan/template_fan.h @@ -1,8 +1,7 @@ #pragma once -#include - #include "esphome/core/component.h" +#include "esphome/core/helpers.h" #include "esphome/components/fan/fan.h" namespace esphome { @@ -16,8 +15,8 @@ class TemplateFan : public Component, public fan::Fan { void set_has_direction(bool has_direction) { this->has_direction_ = has_direction; } void set_has_oscillating(bool has_oscillating) { this->has_oscillating_ = has_oscillating; } void set_speed_count(int count) { this->speed_count_ = count; } - void set_preset_modes(const std::set &presets) { this->preset_modes_ = presets; } - fan::FanTraits get_traits() override { return this->traits_; } + void set_preset_modes(const std::initializer_list &presets) { this->preset_modes_ = presets; } + const fan::FanTraits &get_traits() override { return this->traits_; } protected: void control(const fan::FanCall &call) 
override; @@ -26,7 +25,7 @@ class TemplateFan : public Component, public fan::Fan { bool has_direction_{false}; int speed_count_{0}; fan::FanTraits traits_; - std::set preset_modes_{}; + FixedVector preset_modes_{}; }; } // namespace template_ diff --git a/tests/components/fan/common.yaml b/tests/components/fan/common.yaml new file mode 100644 index 0000000000..55c2a656fd --- /dev/null +++ b/tests/components/fan/common.yaml @@ -0,0 +1,11 @@ +fan: + - platform: template + id: test_fan + name: "Test Fan" + preset_modes: + - Eco + - Sleep + - Turbo + has_oscillating: true + has_direction: true + speed_count: 3 diff --git a/tests/components/fan/test.esp8266-ard.yaml b/tests/components/fan/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/fan/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml From 04d127015c1b7e1741fb080f3bd4d983c4bd242b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 11:04:38 -1000 Subject: [PATCH 305/336] Add basic fan compile tests baseline for https://github.com/esphome/esphome/pull/11483 --- tests/components/fan/common.yaml | 11 +++++++++++ tests/components/fan/test.esp8266-ard.yaml | 1 + 2 files changed, 12 insertions(+) create mode 100644 tests/components/fan/common.yaml create mode 100644 tests/components/fan/test.esp8266-ard.yaml diff --git a/tests/components/fan/common.yaml b/tests/components/fan/common.yaml new file mode 100644 index 0000000000..55c2a656fd --- /dev/null +++ b/tests/components/fan/common.yaml @@ -0,0 +1,11 @@ +fan: + - platform: template + id: test_fan + name: "Test Fan" + preset_modes: + - Eco + - Sleep + - Turbo + has_oscillating: true + has_direction: true + speed_count: 3 diff --git a/tests/components/fan/test.esp8266-ard.yaml b/tests/components/fan/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/fan/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml From f2de8df556d01b9faba6ab7f732df39bfe3c4ea9 Mon Sep 17 00:00:00 2001 From: Daniel Stiner Date: Wed, 22 Oct 2025 14:07:01 -0700 Subject: [PATCH 306/336] [openthread] Fix OTA by populating CORE.address with device's mDNS address (#11095) Co-authored-by: J. Nick Koston Co-authored-by: J. 
Nick Koston Co-authored-by: pre-commit-ci-lite[bot] <117423508+pre-commit-ci-lite[bot]@users.noreply.github.com> --- esphome/components/network/util.cpp | 6 +++++- esphome/components/openthread/__init__.py | 13 ++++++++++++- esphome/components/openthread/openthread.cpp | 6 ++++++ esphome/components/openthread/openthread.h | 4 ++++ esphome/core/__init__.py | 8 +++----- tests/unit_tests/test_core.py | 6 ++++-- 6 files changed, 34 insertions(+), 9 deletions(-) diff --git a/esphome/components/network/util.cpp b/esphome/components/network/util.cpp index 27ad9448a4..cb8f8569ad 100644 --- a/esphome/components/network/util.cpp +++ b/esphome/components/network/util.cpp @@ -99,7 +99,11 @@ const std::string &get_use_address() { return wifi::global_wifi_component->get_use_address(); #endif -#if !defined(USE_ETHERNET) && !defined(USE_MODEM) && !defined(USE_WIFI) +#ifdef USE_OPENTHREAD + return openthread::global_openthread_component->get_use_address(); +#endif + +#if !defined(USE_ETHERNET) && !defined(USE_MODEM) && !defined(USE_WIFI) && !defined(USE_OPENTHREAD) // Fallback when no network component is defined (e.g., host platform) static const std::string empty; return empty; diff --git a/esphome/components/openthread/__init__.py b/esphome/components/openthread/__init__.py index 4865399d02..572ec144d4 100644 --- a/esphome/components/openthread/__init__.py +++ b/esphome/components/openthread/__init__.py @@ -8,8 +8,10 @@ from esphome.components.esp32 import ( ) from esphome.components.mdns import MDNSComponent, enable_mdns_storage import esphome.config_validation as cv -from esphome.const import CONF_CHANNEL, CONF_ENABLE_IPV6, CONF_ID +from esphome.const import CONF_CHANNEL, CONF_ENABLE_IPV6, CONF_ID, CONF_USE_ADDRESS +from esphome.core import CORE import esphome.final_validate as fv +from esphome.types import ConfigType from .const import ( CONF_DEVICE_TYPE, @@ -108,6 +110,12 @@ _CONNECTION_SCHEMA = cv.Schema( ) +def _validate(config: ConfigType) -> ConfigType: + if CONF_USE_ADDRESS not in config: + config[CONF_USE_ADDRESS] = f"{CORE.name}.local" + return config + + def _require_vfs_select(config): """Register VFS select requirement during config validation.""" # OpenThread uses esp_vfs_eventfd which requires VFS select support @@ -126,11 +134,13 @@ CONFIG_SCHEMA = cv.All( ), cv.Optional(CONF_FORCE_DATASET): cv.boolean, cv.Optional(CONF_TLV): cv.string_strict, + cv.Optional(CONF_USE_ADDRESS): cv.string_strict, } ).extend(_CONNECTION_SCHEMA), cv.has_exactly_one_key(CONF_NETWORK_KEY, CONF_TLV), cv.only_with_esp_idf, only_on_variant(supported=[VARIANT_ESP32C6, VARIANT_ESP32H2]), + _validate, _require_vfs_select, ) @@ -155,6 +165,7 @@ async def to_code(config): enable_mdns_storage() ot = cg.new_Pvariable(config[CONF_ID]) + cg.add(ot.set_use_address(config[CONF_USE_ADDRESS])) await cg.register_component(ot, config) srp = cg.new_Pvariable(config[CONF_SRP_ID]) diff --git a/esphome/components/openthread/openthread.cpp b/esphome/components/openthread/openthread.cpp index b2c2519c08..db909e6b1f 100644 --- a/esphome/components/openthread/openthread.cpp +++ b/esphome/components/openthread/openthread.cpp @@ -252,6 +252,12 @@ void OpenThreadComponent::on_factory_reset(std::function callback) { ESP_LOGD(TAG, "Waiting on Confirmation Removal SRP Host and Services"); } +// set_use_address() is guaranteed to be called during component setup by Python code generation, +// so use_address_ will always be valid when get_use_address() is called - no fallback needed. 
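// Illustrative sketch, not part of this patch: roughly what the Python to_code()
// above emits into the generated main.cpp once _validate() has defaulted
// use_address to "<node-name>.local". The node name "my-node" and the wrapper
// function are made up, and App.register_component() is the usual registration
// call rather than something shown in this diff; the real generated boilerplate
// differs in detail.
#include "esphome/components/openthread/openthread.h"
#include "esphome/core/application.h"

void sketch_setup_openthread() {
  auto *ot = new esphome::openthread::OpenThreadComponent();
  // Codegen always emits this call, which is why get_use_address() needs no fallback.
  ot->set_use_address("my-node.local");
  esphome::App.register_component(ot);
  // With OpenThread as the device's network layer, network::get_use_address()
  // now returns "my-node.local", which is the address CORE.address and OTA use.
}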
+const std::string &OpenThreadComponent::get_use_address() const { return this->use_address_; } + +void OpenThreadComponent::set_use_address(const std::string &use_address) { this->use_address_ = use_address; } + } // namespace openthread } // namespace esphome diff --git a/esphome/components/openthread/openthread.h b/esphome/components/openthread/openthread.h index 5d139c633d..19dbeb4628 100644 --- a/esphome/components/openthread/openthread.h +++ b/esphome/components/openthread/openthread.h @@ -33,11 +33,15 @@ class OpenThreadComponent : public Component { void on_factory_reset(std::function callback); void defer_factory_reset_external_callback(); + const std::string &get_use_address() const; + void set_use_address(const std::string &use_address); + protected: std::optional get_omr_address_(InstanceLock &lock); bool teardown_started_{false}; bool teardown_complete_{false}; std::function factory_reset_external_callback_; + std::string use_address_; }; extern OpenThreadComponent *global_openthread_component; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables) diff --git a/esphome/core/__init__.py b/esphome/core/__init__.py index 2d49d29c5e..fed5265d6b 100644 --- a/esphome/core/__init__.py +++ b/esphome/core/__init__.py @@ -636,11 +636,9 @@ class EsphomeCore: if self.config is None: raise ValueError("Config has not been loaded yet") - if CONF_WIFI in self.config: - return self.config[CONF_WIFI][CONF_USE_ADDRESS] - - if CONF_ETHERNET in self.config: - return self.config[CONF_ETHERNET][CONF_USE_ADDRESS] + for network_type in (CONF_WIFI, CONF_ETHERNET, CONF_OPENTHREAD): + if network_type in self.config: + return self.config[network_type][CONF_USE_ADDRESS] if CONF_OPENTHREAD in self.config: return f"{self.name}.local" diff --git a/tests/unit_tests/test_core.py b/tests/unit_tests/test_core.py index 41114ae18b..92b60efd93 100644 --- a/tests/unit_tests/test_core.py +++ b/tests/unit_tests/test_core.py @@ -571,9 +571,11 @@ class TestEsphomeCore: assert target.address == "4.3.2.1" def test_address__openthread(self, target): - target.name = "test-device" target.config = {} - target.config[const.CONF_OPENTHREAD] = {} + target.config[const.CONF_OPENTHREAD] = { + const.CONF_USE_ADDRESS: "test-device.local" + } + target.name = "test-device" assert target.address == "test-device.local" From f11e8e36b5412ea09c3d69d904d4dbd3c6be8f0f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 11:09:10 -1000 Subject: [PATCH 307/336] missed --- esphome/components/api/api_connection.cpp | 4 ++-- esphome/components/binary/fan/binary_fan.h | 2 +- esphome/components/copy/fan/copy_fan.h | 2 +- esphome/components/demo/demo_fan.h | 24 ++++++++++++---------- esphome/components/tuya/fan/tuya_fan.h | 2 +- 5 files changed, 18 insertions(+), 16 deletions(-) diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index 7c135946f8..05a4f9e63e 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -401,7 +401,7 @@ uint16_t APIConnection::try_send_fan_state(EntityBase *entity, APIConnection *co bool is_single) { auto *fan = static_cast(entity); FanStateResponse msg; - auto traits = fan->get_traits(); + const auto &traits = fan->get_traits(); msg.state = fan->state; if (traits.supports_oscillation()) msg.oscillating = fan->oscillating; @@ -418,7 +418,7 @@ uint16_t APIConnection::try_send_fan_info(EntityBase *entity, APIConnection *con bool is_single) { auto *fan = static_cast(entity); ListEntitiesFanResponse msg; - auto traits = fan->get_traits(); + const auto &traits = fan->get_traits(); msg.supports_oscillation = traits.supports_oscillation(); msg.supports_speed = traits.supports_speed(); msg.supports_direction = traits.supports_direction(); diff --git a/esphome/components/binary/fan/binary_fan.h b/esphome/components/binary/fan/binary_fan.h index 16bce2e6af..b87e1c5d9d 100644 --- a/esphome/components/binary/fan/binary_fan.h +++ b/esphome/components/binary/fan/binary_fan.h @@ -16,7 +16,7 @@ class BinaryFan : public Component, public fan::Fan { void set_oscillating(output::BinaryOutput *oscillating) { this->oscillating_ = oscillating; } void set_direction(output::BinaryOutput *direction) { this->direction_ = direction; } - fan::FanTraits get_traits() override; + const fan::FanTraits &get_traits() override; protected: void control(const fan::FanCall &call) override; diff --git a/esphome/components/copy/fan/copy_fan.h b/esphome/components/copy/fan/copy_fan.h index b474975bc4..194827b9f8 100644 --- a/esphome/components/copy/fan/copy_fan.h +++ b/esphome/components/copy/fan/copy_fan.h @@ -12,7 +12,7 @@ class CopyFan : public fan::Fan, public Component { void setup() override; void dump_config() override; - fan::FanTraits get_traits() override; + const fan::FanTraits &get_traits() override; protected: void control(const fan::FanCall &call) override; diff --git a/esphome/components/demo/demo_fan.h b/esphome/components/demo/demo_fan.h index 09edc4e0b7..568e90b826 100644 --- a/esphome/components/demo/demo_fan.h +++ b/esphome/components/demo/demo_fan.h @@ -16,8 +16,9 @@ enum class DemoFanType { class DemoFan : public fan::Fan, public Component { public: void set_type(DemoFanType type) { type_ = type; } - fan::FanTraits get_traits() override { - fan::FanTraits traits{}; + const fan::FanTraits &get_traits() override { + // Note: Demo fan builds traits dynamically, so we store it as a member + this->traits_ = fan::FanTraits{}; // oscillation // speed @@ -27,22 +28,22 @@ class DemoFan : public fan::Fan, public Component { case DemoFanType::TYPE_1: break; case DemoFanType::TYPE_2: - traits.set_oscillation(true); + this->traits_.set_oscillation(true); break; case DemoFanType::TYPE_3: - traits.set_direction(true); - traits.set_speed(true); - traits.set_supported_speed_count(5); + this->traits_.set_direction(true); + this->traits_.set_speed(true); + 
this->traits_.set_supported_speed_count(5); break; case DemoFanType::TYPE_4: - traits.set_direction(true); - traits.set_speed(true); - traits.set_supported_speed_count(100); - traits.set_oscillation(true); + this->traits_.set_direction(true); + this->traits_.set_speed(true); + this->traits_.set_supported_speed_count(100); + this->traits_.set_oscillation(true); break; } - return traits; + return this->traits_; } protected: @@ -60,6 +61,7 @@ class DemoFan : public fan::Fan, public Component { } DemoFanType type_; + fan::FanTraits traits_; }; } // namespace demo diff --git a/esphome/components/tuya/fan/tuya_fan.h b/esphome/components/tuya/fan/tuya_fan.h index 527efa8246..100579ea9f 100644 --- a/esphome/components/tuya/fan/tuya_fan.h +++ b/esphome/components/tuya/fan/tuya_fan.h @@ -17,7 +17,7 @@ class TuyaFan : public Component, public fan::Fan { void set_oscillation_id(uint8_t oscillation_id) { this->oscillation_id_ = oscillation_id; } void set_direction_id(uint8_t direction_id) { this->direction_id_ = direction_id; } - fan::FanTraits get_traits() override; + const fan::FanTraits &get_traits() override; protected: void control(const fan::FanCall &call) override; From ac36b97262bbfd60f8d67746fcae0bd93f7af3f6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 11:16:13 -1000 Subject: [PATCH 308/336] reduce scope --- esphome/components/api/api.proto | 2 +- esphome/components/api/api_connection.cpp | 4 ++-- esphome/components/api/api_pb2.h | 2 +- esphome/components/binary/fan/binary_fan.h | 2 +- esphome/components/copy/fan/copy_fan.h | 3 +-- esphome/components/demo/demo_fan.h | 24 +++++++++---------- esphome/components/fan/fan.cpp | 4 ++-- esphome/components/fan/fan.h | 2 +- esphome/components/fan/fan_traits.h | 21 +++++----------- esphome/components/hbridge/fan/hbridge_fan.h | 6 ++--- esphome/components/speed/fan/speed_fan.h | 6 ++--- .../components/template/fan/template_fan.cpp | 3 +-- .../components/template/fan/template_fan.h | 6 ++--- esphome/components/tuya/fan/tuya_fan.h | 2 +- 14 files changed, 37 insertions(+), 50 deletions(-) diff --git a/esphome/components/api/api.proto b/esphome/components/api/api.proto index 34be6e4aa2..a4c2557ffe 100644 --- a/esphome/components/api/api.proto +++ b/esphome/components/api/api.proto @@ -425,7 +425,7 @@ message ListEntitiesFanResponse { bool disabled_by_default = 9; string icon = 10 [(field_ifdef) = "USE_ENTITY_ICON"]; EntityCategory entity_category = 11; - repeated string supported_preset_modes = 12 [(container_pointer) = "FixedVector"]; + repeated string supported_preset_modes = 12 [(container_pointer) = "std::vector"]; uint32 device_id = 13 [(field_ifdef) = "USE_DEVICES"]; } // Deprecated in API version 1.6 - only used in deprecated fields diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index 05a4f9e63e..7c135946f8 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -401,7 +401,7 @@ uint16_t APIConnection::try_send_fan_state(EntityBase *entity, APIConnection *co bool is_single) { auto *fan = static_cast(entity); FanStateResponse msg; - const auto &traits = fan->get_traits(); + auto traits = fan->get_traits(); msg.state = fan->state; if (traits.supports_oscillation()) msg.oscillating = fan->oscillating; @@ -418,7 +418,7 @@ uint16_t APIConnection::try_send_fan_info(EntityBase *entity, APIConnection *con bool is_single) { auto *fan = static_cast(entity); ListEntitiesFanResponse msg; - const auto &traits = fan->get_traits(); + auto 
traits = fan->get_traits(); msg.supports_oscillation = traits.supports_oscillation(); msg.supports_speed = traits.supports_speed(); msg.supports_direction = traits.supports_direction(); diff --git a/esphome/components/api/api_pb2.h b/esphome/components/api/api_pb2.h index 647dd47b89..e71ad2c64e 100644 --- a/esphome/components/api/api_pb2.h +++ b/esphome/components/api/api_pb2.h @@ -725,7 +725,7 @@ class ListEntitiesFanResponse final : public InfoResponseProtoMessage { bool supports_speed{false}; bool supports_direction{false}; int32_t supported_speed_count{0}; - const FixedVector *supported_preset_modes{}; + const std::vector *supported_preset_modes{}; void encode(ProtoWriteBuffer buffer) const override; void calculate_size(ProtoSize &size) const override; #ifdef HAS_PROTO_MESSAGE_DUMP diff --git a/esphome/components/binary/fan/binary_fan.h b/esphome/components/binary/fan/binary_fan.h index b87e1c5d9d..16bce2e6af 100644 --- a/esphome/components/binary/fan/binary_fan.h +++ b/esphome/components/binary/fan/binary_fan.h @@ -16,7 +16,7 @@ class BinaryFan : public Component, public fan::Fan { void set_oscillating(output::BinaryOutput *oscillating) { this->oscillating_ = oscillating; } void set_direction(output::BinaryOutput *direction) { this->direction_ = direction; } - const fan::FanTraits &get_traits() override; + fan::FanTraits get_traits() override; protected: void control(const fan::FanCall &call) override; diff --git a/esphome/components/copy/fan/copy_fan.h b/esphome/components/copy/fan/copy_fan.h index 194827b9f8..e1212537f1 100644 --- a/esphome/components/copy/fan/copy_fan.h +++ b/esphome/components/copy/fan/copy_fan.h @@ -12,11 +12,10 @@ class CopyFan : public fan::Fan, public Component { void setup() override; void dump_config() override; - const fan::FanTraits &get_traits() override; + fan::FanTraits get_traits() override; protected: void control(const fan::FanCall &call) override; - ; fan::Fan *source_; }; diff --git a/esphome/components/demo/demo_fan.h b/esphome/components/demo/demo_fan.h index 568e90b826..09edc4e0b7 100644 --- a/esphome/components/demo/demo_fan.h +++ b/esphome/components/demo/demo_fan.h @@ -16,9 +16,8 @@ enum class DemoFanType { class DemoFan : public fan::Fan, public Component { public: void set_type(DemoFanType type) { type_ = type; } - const fan::FanTraits &get_traits() override { - // Note: Demo fan builds traits dynamically, so we store it as a member - this->traits_ = fan::FanTraits{}; + fan::FanTraits get_traits() override { + fan::FanTraits traits{}; // oscillation // speed @@ -28,22 +27,22 @@ class DemoFan : public fan::Fan, public Component { case DemoFanType::TYPE_1: break; case DemoFanType::TYPE_2: - this->traits_.set_oscillation(true); + traits.set_oscillation(true); break; case DemoFanType::TYPE_3: - this->traits_.set_direction(true); - this->traits_.set_speed(true); - this->traits_.set_supported_speed_count(5); + traits.set_direction(true); + traits.set_speed(true); + traits.set_supported_speed_count(5); break; case DemoFanType::TYPE_4: - this->traits_.set_direction(true); - this->traits_.set_speed(true); - this->traits_.set_supported_speed_count(100); - this->traits_.set_oscillation(true); + traits.set_direction(true); + traits.set_speed(true); + traits.set_supported_speed_count(100); + traits.set_oscillation(true); break; } - return this->traits_; + return traits; } protected: @@ -61,7 +60,6 @@ class DemoFan : public fan::Fan, public Component { } DemoFanType type_; - fan::FanTraits traits_; }; } // namespace demo diff --git 
a/esphome/components/fan/fan.cpp b/esphome/components/fan/fan.cpp index 839b0d08cc..ea9cfd0c37 100644 --- a/esphome/components/fan/fan.cpp +++ b/esphome/components/fan/fan.cpp @@ -132,7 +132,7 @@ FanCall Fan::make_call() { return FanCall(*this); } void Fan::add_on_state_callback(std::function &&callback) { this->state_callback_.add(std::move(callback)); } void Fan::publish_state() { - const auto &traits = this->get_traits(); + auto traits = this->get_traits(); ESP_LOGD(TAG, "'%s' - Sending state:", this->name_.c_str()); ESP_LOGD(TAG, " State: %s", ONOFF(this->state)); @@ -211,7 +211,7 @@ void Fan::save_state_() { } void Fan::dump_traits_(const char *tag, const char *prefix) { - const auto &traits = this->get_traits(); + auto traits = this->get_traits(); if (traits.supports_speed()) { ESP_LOGCONFIG(tag, diff --git a/esphome/components/fan/fan.h b/esphome/components/fan/fan.h index 901181903a..b74187eb4a 100644 --- a/esphome/components/fan/fan.h +++ b/esphome/components/fan/fan.h @@ -127,7 +127,7 @@ class Fan : public EntityBase { void publish_state(); - virtual const FanTraits &get_traits() = 0; + virtual FanTraits get_traits() = 0; /// Set the restore mode of this fan. void set_restore_mode(FanRestoreMode restore_mode) { this->restore_mode_ = restore_mode; } diff --git a/esphome/components/fan/fan_traits.h b/esphome/components/fan/fan_traits.h index e0b64aa0fa..9e1b669a2b 100644 --- a/esphome/components/fan/fan_traits.h +++ b/esphome/components/fan/fan_traits.h @@ -1,5 +1,5 @@ #include -#include "esphome/core/helpers.h" +#include #pragma once @@ -36,18 +36,9 @@ class FanTraits { /// Set whether this fan supports changing direction void set_direction(bool direction) { this->direction_ = direction; } /// Return the preset modes supported by the fan. - const FixedVector &supported_preset_modes() const { return this->preset_modes_; } - /// Set the preset modes supported by the fan (from initializer list). - void set_supported_preset_modes(const std::initializer_list &preset_modes) { - this->preset_modes_ = preset_modes; - } - /// Set the preset modes supported by the fan (from FixedVector). - template void set_supported_preset_modes(const T &preset_modes) { - this->preset_modes_.init(preset_modes.size()); - for (const auto &mode : preset_modes) { - this->preset_modes_.push_back(mode); - } - } + const std::vector &supported_preset_modes() const { return this->preset_modes_; } + /// Set the preset modes supported by the fan. + void set_supported_preset_modes(const std::vector &preset_modes) { this->preset_modes_ = preset_modes; } /// Return if preset modes are supported bool supports_preset_modes() const { return !this->preset_modes_.empty(); } @@ -59,13 +50,13 @@ class FanTraits { // It is used by the API to avoid copying data when encoding messages. // Warning: Do not use this method outside of the API connection code. // It returns a reference to internal data that can be invalidated. 
- const FixedVector &supported_preset_modes_for_api_() const { return this->preset_modes_; } + const std::vector &supported_preset_modes_for_api_() const { return this->preset_modes_; } #endif bool oscillation_{false}; bool speed_{false}; bool direction_{false}; int speed_count_{}; - FixedVector preset_modes_{}; + std::vector preset_modes_{}; }; } // namespace fan diff --git a/esphome/components/hbridge/fan/hbridge_fan.h b/esphome/components/hbridge/fan/hbridge_fan.h index 68458d7922..8562fd20be 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.h +++ b/esphome/components/hbridge/fan/hbridge_fan.h @@ -21,11 +21,11 @@ class HBridgeFan : public Component, public fan::Fan { void set_pin_a(output::FloatOutput *pin_a) { pin_a_ = pin_a; } void set_pin_b(output::FloatOutput *pin_b) { pin_b_ = pin_b; } void set_enable_pin(output::FloatOutput *enable) { enable_ = enable; } - void set_preset_modes(const std::initializer_list &presets) { preset_modes_ = presets; } + void set_preset_modes(const std::vector &presets) { preset_modes_ = presets; } void setup() override; void dump_config() override; - const fan::FanTraits &get_traits() override { return this->traits_; } + fan::FanTraits get_traits() override { return this->traits_; } fan::FanCall brake(); @@ -37,7 +37,7 @@ class HBridgeFan : public Component, public fan::Fan { int speed_count_{}; DecayMode decay_mode_{DECAY_MODE_SLOW}; fan::FanTraits traits_; - FixedVector preset_modes_{}; + std::vector preset_modes_{}; void control(const fan::FanCall &call) override; void write_state_(); diff --git a/esphome/components/speed/fan/speed_fan.h b/esphome/components/speed/fan/speed_fan.h index 60c2267b04..d994ddd15e 100644 --- a/esphome/components/speed/fan/speed_fan.h +++ b/esphome/components/speed/fan/speed_fan.h @@ -17,8 +17,8 @@ class SpeedFan : public Component, public fan::Fan { void set_output(output::FloatOutput *output) { this->output_ = output; } void set_oscillating(output::BinaryOutput *oscillating) { this->oscillating_ = oscillating; } void set_direction(output::BinaryOutput *direction) { this->direction_ = direction; } - void set_preset_modes(const std::initializer_list &presets) { this->preset_modes_ = presets; } - const fan::FanTraits &get_traits() override { return this->traits_; } + void set_preset_modes(const std::vector &presets) { this->preset_modes_ = presets; } + fan::FanTraits get_traits() override { return this->traits_; } protected: void control(const fan::FanCall &call) override; @@ -29,7 +29,7 @@ class SpeedFan : public Component, public fan::Fan { output::BinaryOutput *direction_{nullptr}; int speed_count_{}; fan::FanTraits traits_; - FixedVector preset_modes_{}; + std::vector preset_modes_{}; }; } // namespace speed diff --git a/esphome/components/template/fan/template_fan.cpp b/esphome/components/template/fan/template_fan.cpp index 477e2c4981..5f4a2ae8f7 100644 --- a/esphome/components/template/fan/template_fan.cpp +++ b/esphome/components/template/fan/template_fan.cpp @@ -15,8 +15,7 @@ void TemplateFan::setup() { // Construct traits this->traits_ = fan::FanTraits(this->has_oscillating_, this->speed_count_ > 0, this->has_direction_, this->speed_count_); - if (!this->preset_modes_.empty()) - this->traits_.set_supported_preset_modes(this->preset_modes_); + this->traits_.set_supported_preset_modes(this->preset_modes_); } void TemplateFan::dump_config() { LOG_FAN("", "Template Fan", this); } diff --git a/esphome/components/template/fan/template_fan.h b/esphome/components/template/fan/template_fan.h index 5b175b21a4..4a32c912fc 
100644 --- a/esphome/components/template/fan/template_fan.h +++ b/esphome/components/template/fan/template_fan.h @@ -15,8 +15,8 @@ class TemplateFan : public Component, public fan::Fan { void set_has_direction(bool has_direction) { this->has_direction_ = has_direction; } void set_has_oscillating(bool has_oscillating) { this->has_oscillating_ = has_oscillating; } void set_speed_count(int count) { this->speed_count_ = count; } - void set_preset_modes(const std::initializer_list &presets) { this->preset_modes_ = presets; } - const fan::FanTraits &get_traits() override { return this->traits_; } + void set_preset_modes(const std::vector &presets) { this->preset_modes_ = presets; } + fan::FanTraits get_traits() override { return this->traits_; } protected: void control(const fan::FanCall &call) override; @@ -25,7 +25,7 @@ class TemplateFan : public Component, public fan::Fan { bool has_direction_{false}; int speed_count_{0}; fan::FanTraits traits_; - FixedVector preset_modes_{}; + std::vector preset_modes_{}; }; } // namespace template_ diff --git a/esphome/components/tuya/fan/tuya_fan.h b/esphome/components/tuya/fan/tuya_fan.h index 100579ea9f..527efa8246 100644 --- a/esphome/components/tuya/fan/tuya_fan.h +++ b/esphome/components/tuya/fan/tuya_fan.h @@ -17,7 +17,7 @@ class TuyaFan : public Component, public fan::Fan { void set_oscillation_id(uint8_t oscillation_id) { this->oscillation_id_ = oscillation_id; } void set_direction_id(uint8_t direction_id) { this->direction_id_ = direction_id; } - const fan::FanTraits &get_traits() override; + fan::FanTraits get_traits() override; protected: void control(const fan::FanCall &call) override; From acd24402ddc7e7c086ed20d5e88baecb7d826824 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 11:16:28 -1000 Subject: [PATCH 309/336] reduce scope --- esphome/components/hbridge/fan/hbridge_fan.cpp | 3 +-- esphome/components/speed/fan/speed_fan.cpp | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/esphome/components/hbridge/fan/hbridge_fan.cpp b/esphome/components/hbridge/fan/hbridge_fan.cpp index c059783b1e..605a9d4ef3 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.cpp +++ b/esphome/components/hbridge/fan/hbridge_fan.cpp @@ -36,8 +36,7 @@ void HBridgeFan::setup() { // Construct traits this->traits_ = fan::FanTraits(this->oscillating_ != nullptr, true, true, this->speed_count_); - if (!this->preset_modes_.empty()) - this->traits_.set_supported_preset_modes(this->preset_modes_); + this->traits_.set_supported_preset_modes(this->preset_modes_); } void HBridgeFan::dump_config() { diff --git a/esphome/components/speed/fan/speed_fan.cpp b/esphome/components/speed/fan/speed_fan.cpp index 9205d3592b..57bd795416 100644 --- a/esphome/components/speed/fan/speed_fan.cpp +++ b/esphome/components/speed/fan/speed_fan.cpp @@ -15,8 +15,7 @@ void SpeedFan::setup() { // Construct traits this->traits_ = fan::FanTraits(this->oscillating_ != nullptr, true, this->direction_ != nullptr, this->speed_count_); - if (!this->preset_modes_.empty()) - this->traits_.set_supported_preset_modes(this->preset_modes_); + this->traits_.set_supported_preset_modes(this->preset_modes_); } void SpeedFan::dump_config() { LOG_FAN("", "Speed Fan", this); } From 935acc7d5e0ac279f71fbd1f76211bb9fd421385 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 11:24:12 -1000 Subject: [PATCH 310/336] fixed --- esphome/components/api/api.proto | 2 +- esphome/components/api/api_pb2.h | 2 +- esphome/components/fan/fan_traits.h | 15 ++++++++++----- esphome/components/hbridge/fan/hbridge_fan.h | 4 ++-- esphome/components/speed/fan/speed_fan.h | 4 ++-- esphome/components/template/fan/template_fan.h | 4 ++-- 6 files changed, 18 insertions(+), 13 deletions(-) diff --git a/esphome/components/api/api.proto b/esphome/components/api/api.proto index a4c2557ffe..34be6e4aa2 100644 --- a/esphome/components/api/api.proto +++ b/esphome/components/api/api.proto @@ -425,7 +425,7 @@ message ListEntitiesFanResponse { bool disabled_by_default = 9; string icon = 10 [(field_ifdef) = "USE_ENTITY_ICON"]; EntityCategory entity_category = 11; - repeated string supported_preset_modes = 12 [(container_pointer) = "std::vector"]; + repeated string supported_preset_modes = 12 [(container_pointer) = "FixedVector"]; uint32 device_id = 13 [(field_ifdef) = "USE_DEVICES"]; } // Deprecated in API version 1.6 - only used in deprecated fields diff --git a/esphome/components/api/api_pb2.h b/esphome/components/api/api_pb2.h index e71ad2c64e..647dd47b89 100644 --- a/esphome/components/api/api_pb2.h +++ b/esphome/components/api/api_pb2.h @@ -725,7 +725,7 @@ class ListEntitiesFanResponse final : public InfoResponseProtoMessage { bool supports_speed{false}; bool supports_direction{false}; int32_t supported_speed_count{0}; - const std::vector *supported_preset_modes{}; + const FixedVector *supported_preset_modes{}; void encode(ProtoWriteBuffer buffer) const override; void calculate_size(ProtoSize &size) const override; #ifdef HAS_PROTO_MESSAGE_DUMP diff --git a/esphome/components/fan/fan_traits.h b/esphome/components/fan/fan_traits.h index 9e1b669a2b..c37acfa67d 100644 --- a/esphome/components/fan/fan_traits.h +++ b/esphome/components/fan/fan_traits.h @@ -1,5 +1,5 @@ #include -#include +#include "esphome/core/helpers.h" #pragma once @@ -36,9 +36,14 @@ class FanTraits { /// Set whether this fan supports changing direction void set_direction(bool direction) { this->direction_ = direction; } /// Return the preset modes supported by the fan. - const std::vector &supported_preset_modes() const { return this->preset_modes_; } + const FixedVector &supported_preset_modes() const { return this->preset_modes_; } /// Set the preset modes supported by the fan. - void set_supported_preset_modes(const std::vector &preset_modes) { this->preset_modes_ = preset_modes; } + template void set_supported_preset_modes(const T &preset_modes) { + this->preset_modes_.init(preset_modes.size()); + for (const auto &mode : preset_modes) { + this->preset_modes_.push_back(mode); + } + } /// Return if preset modes are supported bool supports_preset_modes() const { return !this->preset_modes_.empty(); } @@ -50,13 +55,13 @@ class FanTraits { // It is used by the API to avoid copying data when encoding messages. // Warning: Do not use this method outside of the API connection code. // It returns a reference to internal data that can be invalidated. 
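// Illustrative sketch, not part of this patch: the template setter introduced
// above copies any sized, iterable container into the FixedVector by reserving
// the exact capacity once with init() and then filling it with push_back(),
// avoiding both std::vector's growth reallocations and std::set's per-element
// tree nodes. A standalone example of that fill pattern (the function name is
// made up), using the preset names from the test YAML:
#include <string>
#include <vector>
#include "esphome/core/helpers.h"

static void sketch_fill_presets(const std::vector<std::string> &src,
                                esphome::FixedVector<std::string> &out) {
  out.init(src.size());   // reserve the exact capacity once
  for (const auto &mode : src) {
    out.push_back(mode);  // fill up to that capacity; no reallocation
  }
}
// e.g. with a caller-owned FixedVector<std::string> fan_presets:
//   sketch_fill_presets({"Eco", "Sleep", "Turbo"}, fan_presets);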
- const std::vector &supported_preset_modes_for_api_() const { return this->preset_modes_; } + const FixedVector &supported_preset_modes_for_api_() const { return this->preset_modes_; } #endif bool oscillation_{false}; bool speed_{false}; bool direction_{false}; int speed_count_{}; - std::vector preset_modes_{}; + FixedVector preset_modes_{}; }; } // namespace fan diff --git a/esphome/components/hbridge/fan/hbridge_fan.h b/esphome/components/hbridge/fan/hbridge_fan.h index 8562fd20be..cea4f81fe5 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.h +++ b/esphome/components/hbridge/fan/hbridge_fan.h @@ -21,7 +21,7 @@ class HBridgeFan : public Component, public fan::Fan { void set_pin_a(output::FloatOutput *pin_a) { pin_a_ = pin_a; } void set_pin_b(output::FloatOutput *pin_b) { pin_b_ = pin_b; } void set_enable_pin(output::FloatOutput *enable) { enable_ = enable; } - void set_preset_modes(const std::vector &presets) { preset_modes_ = presets; } + void set_preset_modes(const FixedVector &presets) { preset_modes_ = presets; } void setup() override; void dump_config() override; @@ -37,7 +37,7 @@ class HBridgeFan : public Component, public fan::Fan { int speed_count_{}; DecayMode decay_mode_{DECAY_MODE_SLOW}; fan::FanTraits traits_; - std::vector preset_modes_{}; + FixedVector preset_modes_{}; void control(const fan::FanCall &call) override; void write_state_(); diff --git a/esphome/components/speed/fan/speed_fan.h b/esphome/components/speed/fan/speed_fan.h index d994ddd15e..3ffffac231 100644 --- a/esphome/components/speed/fan/speed_fan.h +++ b/esphome/components/speed/fan/speed_fan.h @@ -17,7 +17,7 @@ class SpeedFan : public Component, public fan::Fan { void set_output(output::FloatOutput *output) { this->output_ = output; } void set_oscillating(output::BinaryOutput *oscillating) { this->oscillating_ = oscillating; } void set_direction(output::BinaryOutput *direction) { this->direction_ = direction; } - void set_preset_modes(const std::vector &presets) { this->preset_modes_ = presets; } + void set_preset_modes(const FixedVector &presets) { this->preset_modes_ = presets; } fan::FanTraits get_traits() override { return this->traits_; } protected: @@ -29,7 +29,7 @@ class SpeedFan : public Component, public fan::Fan { output::BinaryOutput *direction_{nullptr}; int speed_count_{}; fan::FanTraits traits_; - std::vector preset_modes_{}; + FixedVector preset_modes_{}; }; } // namespace speed diff --git a/esphome/components/template/fan/template_fan.h b/esphome/components/template/fan/template_fan.h index 4a32c912fc..330f8f2565 100644 --- a/esphome/components/template/fan/template_fan.h +++ b/esphome/components/template/fan/template_fan.h @@ -15,7 +15,7 @@ class TemplateFan : public Component, public fan::Fan { void set_has_direction(bool has_direction) { this->has_direction_ = has_direction; } void set_has_oscillating(bool has_oscillating) { this->has_oscillating_ = has_oscillating; } void set_speed_count(int count) { this->speed_count_ = count; } - void set_preset_modes(const std::vector &presets) { this->preset_modes_ = presets; } + void set_preset_modes(const FixedVector &presets) { this->preset_modes_ = presets; } fan::FanTraits get_traits() override { return this->traits_; } protected: @@ -25,7 +25,7 @@ class TemplateFan : public Component, public fan::Fan { bool has_direction_{false}; int speed_count_{0}; fan::FanTraits traits_; - std::vector preset_modes_{}; + FixedVector preset_modes_{}; }; } // namespace template_ From 657e6f0bce67b70dc6c6567bf63c82e526c2ba4d Mon Sep 17 00:00:00 2001 From: 
"J. Nick Koston" Date: Wed, 22 Oct 2025 11:28:53 -1000 Subject: [PATCH 311/336] fixed --- esphome/components/fan/fan.cpp | 2 +- esphome/components/fan/fan.h | 16 ++++++++++++++++ esphome/components/fan/fan_traits.h | 2 ++ esphome/components/hbridge/fan/hbridge_fan.cpp | 8 +++++--- esphome/components/hbridge/fan/hbridge_fan.h | 5 +---- esphome/components/speed/fan/speed_fan.cpp | 8 +++++--- esphome/components/speed/fan/speed_fan.h | 5 +---- esphome/components/template/fan/template_fan.cpp | 8 +++++--- esphome/components/template/fan/template_fan.h | 5 +---- 9 files changed, 37 insertions(+), 22 deletions(-) diff --git a/esphome/components/fan/fan.cpp b/esphome/components/fan/fan.cpp index ea9cfd0c37..26a61de0b1 100644 --- a/esphome/components/fan/fan.cpp +++ b/esphome/components/fan/fan.cpp @@ -39,7 +39,7 @@ void FanCall::perform() { } void FanCall::validate_() { - const auto &traits = this->parent_.get_traits(); + auto traits = this->parent_.get_traits(); if (this->speed_.has_value()) { this->speed_ = clamp(*this->speed_, 1, traits.supported_speed_count()); diff --git a/esphome/components/fan/fan.h b/esphome/components/fan/fan.h index b74187eb4a..9b11a214d6 100644 --- a/esphome/components/fan/fan.h +++ b/esphome/components/fan/fan.h @@ -1,5 +1,6 @@ #pragma once +#include #include "esphome/core/entity_base.h" #include "esphome/core/helpers.h" #include "esphome/core/log.h" @@ -132,6 +133,20 @@ class Fan : public EntityBase { /// Set the restore mode of this fan. void set_restore_mode(FanRestoreMode restore_mode) { this->restore_mode_ = restore_mode; } + /// Set preset modes - helper for components + void set_preset_modes(const std::initializer_list &presets) { + this->preset_modes_.init(presets.size()); + for (const auto &mode : presets) { + this->preset_modes_.push_back(mode); + } + } + template void set_preset_modes(const T &presets) { + this->preset_modes_.init(presets.size()); + for (const auto &mode : presets) { + this->preset_modes_.push_back(mode); + } + } + protected: friend FanCall; @@ -145,6 +160,7 @@ class Fan : public EntityBase { CallbackManager state_callback_{}; ESPPreferenceObject rtc_; FanRestoreMode restore_mode_; + FixedVector preset_modes_{}; }; } // namespace fan diff --git a/esphome/components/fan/fan_traits.h b/esphome/components/fan/fan_traits.h index c37acfa67d..50090f9621 100644 --- a/esphome/components/fan/fan_traits.h +++ b/esphome/components/fan/fan_traits.h @@ -18,6 +18,8 @@ class FanTraits { FanTraits() = default; FanTraits(bool oscillation, bool speed, bool direction, int speed_count) : oscillation_(oscillation), speed_(speed), direction_(direction), speed_count_(speed_count) {} + FanTraits(FanTraits &&) = default; + FanTraits &operator=(FanTraits &&) = default; /// Return if this fan supports oscillation. 
bool supports_oscillation() const { return this->oscillation_; } diff --git a/esphome/components/hbridge/fan/hbridge_fan.cpp b/esphome/components/hbridge/fan/hbridge_fan.cpp index 605a9d4ef3..56df053d57 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.cpp +++ b/esphome/components/hbridge/fan/hbridge_fan.cpp @@ -33,10 +33,12 @@ void HBridgeFan::setup() { restore->apply(*this); this->write_state_(); } +} - // Construct traits - this->traits_ = fan::FanTraits(this->oscillating_ != nullptr, true, true, this->speed_count_); - this->traits_.set_supported_preset_modes(this->preset_modes_); +fan::FanTraits HBridgeFan::get_traits() { + auto traits = fan::FanTraits(this->oscillating_ != nullptr, true, true, this->speed_count_); + traits.set_supported_preset_modes(this->preset_modes_); + return traits; } void HBridgeFan::dump_config() { diff --git a/esphome/components/hbridge/fan/hbridge_fan.h b/esphome/components/hbridge/fan/hbridge_fan.h index cea4f81fe5..d8fa0f99cb 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.h +++ b/esphome/components/hbridge/fan/hbridge_fan.h @@ -21,11 +21,10 @@ class HBridgeFan : public Component, public fan::Fan { void set_pin_a(output::FloatOutput *pin_a) { pin_a_ = pin_a; } void set_pin_b(output::FloatOutput *pin_b) { pin_b_ = pin_b; } void set_enable_pin(output::FloatOutput *enable) { enable_ = enable; } - void set_preset_modes(const FixedVector &presets) { preset_modes_ = presets; } void setup() override; void dump_config() override; - fan::FanTraits get_traits() override { return this->traits_; } + fan::FanTraits get_traits() override; fan::FanCall brake(); @@ -36,8 +35,6 @@ class HBridgeFan : public Component, public fan::Fan { output::BinaryOutput *oscillating_{nullptr}; int speed_count_{}; DecayMode decay_mode_{DECAY_MODE_SLOW}; - fan::FanTraits traits_; - FixedVector preset_modes_{}; void control(const fan::FanCall &call) override; void write_state_(); diff --git a/esphome/components/speed/fan/speed_fan.cpp b/esphome/components/speed/fan/speed_fan.cpp index 57bd795416..03d242178f 100644 --- a/esphome/components/speed/fan/speed_fan.cpp +++ b/esphome/components/speed/fan/speed_fan.cpp @@ -12,10 +12,12 @@ void SpeedFan::setup() { restore->apply(*this); this->write_state_(); } +} - // Construct traits - this->traits_ = fan::FanTraits(this->oscillating_ != nullptr, true, this->direction_ != nullptr, this->speed_count_); - this->traits_.set_supported_preset_modes(this->preset_modes_); +fan::FanTraits SpeedFan::get_traits() { + auto traits = fan::FanTraits(this->oscillating_ != nullptr, true, this->direction_ != nullptr, this->speed_count_); + traits.set_supported_preset_modes(this->preset_modes_); + return traits; } void SpeedFan::dump_config() { LOG_FAN("", "Speed Fan", this); } diff --git a/esphome/components/speed/fan/speed_fan.h b/esphome/components/speed/fan/speed_fan.h index 3ffffac231..f29a42190e 100644 --- a/esphome/components/speed/fan/speed_fan.h +++ b/esphome/components/speed/fan/speed_fan.h @@ -17,8 +17,7 @@ class SpeedFan : public Component, public fan::Fan { void set_output(output::FloatOutput *output) { this->output_ = output; } void set_oscillating(output::BinaryOutput *oscillating) { this->oscillating_ = oscillating; } void set_direction(output::BinaryOutput *direction) { this->direction_ = direction; } - void set_preset_modes(const FixedVector &presets) { this->preset_modes_ = presets; } - fan::FanTraits get_traits() override { return this->traits_; } + fan::FanTraits get_traits() override; protected: void control(const fan::FanCall 
&call) override; @@ -28,8 +27,6 @@ class SpeedFan : public Component, public fan::Fan { output::BinaryOutput *oscillating_{nullptr}; output::BinaryOutput *direction_{nullptr}; int speed_count_{}; - fan::FanTraits traits_; - FixedVector preset_modes_{}; }; } // namespace speed diff --git a/esphome/components/template/fan/template_fan.cpp b/esphome/components/template/fan/template_fan.cpp index 5f4a2ae8f7..39e853fdb6 100644 --- a/esphome/components/template/fan/template_fan.cpp +++ b/esphome/components/template/fan/template_fan.cpp @@ -11,11 +11,13 @@ void TemplateFan::setup() { if (restore.has_value()) { restore->apply(*this); } +} - // Construct traits - this->traits_ = +fan::FanTraits TemplateFan::get_traits() { + auto traits = fan::FanTraits(this->has_oscillating_, this->speed_count_ > 0, this->has_direction_, this->speed_count_); - this->traits_.set_supported_preset_modes(this->preset_modes_); + traits.set_supported_preset_modes(this->preset_modes_); + return traits; } void TemplateFan::dump_config() { LOG_FAN("", "Template Fan", this); } diff --git a/esphome/components/template/fan/template_fan.h b/esphome/components/template/fan/template_fan.h index 330f8f2565..561c2de756 100644 --- a/esphome/components/template/fan/template_fan.h +++ b/esphome/components/template/fan/template_fan.h @@ -15,8 +15,7 @@ class TemplateFan : public Component, public fan::Fan { void set_has_direction(bool has_direction) { this->has_direction_ = has_direction; } void set_has_oscillating(bool has_oscillating) { this->has_oscillating_ = has_oscillating; } void set_speed_count(int count) { this->speed_count_ = count; } - void set_preset_modes(const FixedVector &presets) { this->preset_modes_ = presets; } - fan::FanTraits get_traits() override { return this->traits_; } + fan::FanTraits get_traits() override; protected: void control(const fan::FanCall &call) override; @@ -24,8 +23,6 @@ class TemplateFan : public Component, public fan::Fan { bool has_oscillating_{false}; bool has_direction_{false}; int speed_count_{0}; - fan::FanTraits traits_; - FixedVector preset_modes_{}; }; } // namespace template_ From eaf0a367b4278d01a139b026d4ea886a6d6a3d06 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 11:37:19 -1000 Subject: [PATCH 312/336] fixed --- esphome/components/copy/fan/copy_fan.cpp | 2 +- esphome/components/fan/fan.h | 16 ------------ esphome/components/fan/fan_traits.h | 25 +++++++++---------- .../components/hbridge/fan/hbridge_fan.cpp | 4 +-- esphome/components/hbridge/fan/hbridge_fan.h | 3 +++ esphome/components/speed/fan/speed_fan.cpp | 5 ++-- esphome/components/speed/fan/speed_fan.h | 2 ++ .../components/template/fan/template_fan.cpp | 6 ++--- .../components/template/fan/template_fan.h | 2 ++ 9 files changed, 25 insertions(+), 40 deletions(-) diff --git a/esphome/components/copy/fan/copy_fan.cpp b/esphome/components/copy/fan/copy_fan.cpp index 15a7f5e025..e2b4c24dd8 100644 --- a/esphome/components/copy/fan/copy_fan.cpp +++ b/esphome/components/copy/fan/copy_fan.cpp @@ -35,7 +35,7 @@ fan::FanTraits CopyFan::get_traits() { traits.set_speed(base.supports_speed()); traits.set_supported_speed_count(base.supported_speed_count()); traits.set_direction(base.supports_direction()); - traits.set_supported_preset_modes(base.supported_preset_modes()); + traits.set_supported_preset_modes(&source_->preset_modes_); return traits; } diff --git a/esphome/components/fan/fan.h b/esphome/components/fan/fan.h index 9b11a214d6..b74187eb4a 100644 --- a/esphome/components/fan/fan.h +++ b/esphome/components/fan/fan.h @@ -1,6 +1,5 @@ #pragma once -#include #include "esphome/core/entity_base.h" #include "esphome/core/helpers.h" #include "esphome/core/log.h" @@ -133,20 +132,6 @@ class Fan : public EntityBase { /// Set the restore mode of this fan. void set_restore_mode(FanRestoreMode restore_mode) { this->restore_mode_ = restore_mode; } - /// Set preset modes - helper for components - void set_preset_modes(const std::initializer_list &presets) { - this->preset_modes_.init(presets.size()); - for (const auto &mode : presets) { - this->preset_modes_.push_back(mode); - } - } - template void set_preset_modes(const T &presets) { - this->preset_modes_.init(presets.size()); - for (const auto &mode : presets) { - this->preset_modes_.push_back(mode); - } - } - protected: friend FanCall; @@ -160,7 +145,6 @@ class Fan : public EntityBase { CallbackManager state_callback_{}; ESPPreferenceObject rtc_; FanRestoreMode restore_mode_; - FixedVector preset_modes_{}; }; } // namespace fan diff --git a/esphome/components/fan/fan_traits.h b/esphome/components/fan/fan_traits.h index 50090f9621..4b0113c451 100644 --- a/esphome/components/fan/fan_traits.h +++ b/esphome/components/fan/fan_traits.h @@ -18,8 +18,12 @@ class FanTraits { FanTraits() = default; FanTraits(bool oscillation, bool speed, bool direction, int speed_count) : oscillation_(oscillation), speed_(speed), direction_(direction), speed_count_(speed_count) {} - FanTraits(FanTraits &&) = default; - FanTraits &operator=(FanTraits &&) = default; + FanTraits(bool oscillation, bool speed, bool direction, int speed_count, const FixedVector *preset_modes) + : oscillation_(oscillation), + speed_(speed), + direction_(direction), + speed_count_(speed_count), + preset_modes_(preset_modes) {} /// Return if this fan supports oscillation. bool supports_oscillation() const { return this->oscillation_; } @@ -38,16 +42,11 @@ class FanTraits { /// Set whether this fan supports changing direction void set_direction(bool direction) { this->direction_ = direction; } /// Return the preset modes supported by the fan. - const FixedVector &supported_preset_modes() const { return this->preset_modes_; } - /// Set the preset modes supported by the fan. 
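// Illustrative sketch, not part of this patch: the shape a fan platform takes with
// the pointer-based traits introduced here. The platform owns the FixedVector of
// preset names and FanTraits only stores a pointer to it, so returning the traits
// by value copies three flags, an int and a pointer instead of duplicating the
// strings. "SketchFan" is a made-up minimal example, not a real component.
#include <string>
#include "esphome/components/fan/fan.h"
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"

class SketchFan : public esphome::Component, public esphome::fan::Fan {
 public:
  void setup() override {
    // Normally filled by code generation from the YAML preset_modes list; done by
    // hand here to keep the sketch self-contained.
    this->preset_modes_.init(3);
    this->preset_modes_.push_back("Eco");
    this->preset_modes_.push_back("Sleep");
    this->preset_modes_.push_back("Turbo");
  }
  esphome::fan::FanTraits get_traits() override {
    // oscillation = false, speed = true, direction = false, 3 speed levels,
    // plus a pointer to the preset names owned by this object.
    return esphome::fan::FanTraits(false, true, false, 3, &this->preset_modes_);
  }

 protected:
  void control(const esphome::fan::FanCall &call) override {
    // A real platform would apply `call` to its outputs before publishing.
    this->publish_state();
  }
  esphome::FixedVector<std::string> preset_modes_{};
};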
- template void set_supported_preset_modes(const T &preset_modes) { - this->preset_modes_.init(preset_modes.size()); - for (const auto &mode : preset_modes) { - this->preset_modes_.push_back(mode); - } - } + const FixedVector &supported_preset_modes() const { return *this->preset_modes_; } + /// Set the preset modes pointer (points to parent Fan's preset_modes_) + void set_supported_preset_modes(const FixedVector *preset_modes) { this->preset_modes_ = preset_modes; } /// Return if preset modes are supported - bool supports_preset_modes() const { return !this->preset_modes_.empty(); } + bool supports_preset_modes() const { return !this->preset_modes_->empty(); } protected: #ifdef USE_API @@ -57,13 +56,13 @@ class FanTraits { // It is used by the API to avoid copying data when encoding messages. // Warning: Do not use this method outside of the API connection code. // It returns a reference to internal data that can be invalidated. - const FixedVector &supported_preset_modes_for_api_() const { return this->preset_modes_; } + const FixedVector &supported_preset_modes_for_api_() const { return *this->preset_modes_; } #endif bool oscillation_{false}; bool speed_{false}; bool direction_{false}; int speed_count_{}; - FixedVector preset_modes_{}; + const FixedVector *preset_modes_{nullptr}; }; } // namespace fan diff --git a/esphome/components/hbridge/fan/hbridge_fan.cpp b/esphome/components/hbridge/fan/hbridge_fan.cpp index 56df053d57..6971e11cf6 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.cpp +++ b/esphome/components/hbridge/fan/hbridge_fan.cpp @@ -36,9 +36,7 @@ void HBridgeFan::setup() { } fan::FanTraits HBridgeFan::get_traits() { - auto traits = fan::FanTraits(this->oscillating_ != nullptr, true, true, this->speed_count_); - traits.set_supported_preset_modes(this->preset_modes_); - return traits; + return fan::FanTraits(this->oscillating_ != nullptr, true, true, this->speed_count_, &this->preset_modes_); } void HBridgeFan::dump_config() { diff --git a/esphome/components/hbridge/fan/hbridge_fan.h b/esphome/components/hbridge/fan/hbridge_fan.h index d8fa0f99cb..847eca6166 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.h +++ b/esphome/components/hbridge/fan/hbridge_fan.h @@ -21,6 +21,8 @@ class HBridgeFan : public Component, public fan::Fan { void set_pin_a(output::FloatOutput *pin_a) { pin_a_ = pin_a; } void set_pin_b(output::FloatOutput *pin_b) { pin_b_ = pin_b; } void set_enable_pin(output::FloatOutput *enable) { enable_ = enable; } + void set_oscillating(output::BinaryOutput *oscillating) { this->oscillating_ = oscillating; } + void set_preset_modes(const std::initializer_list &presets) { this->preset_modes_ = presets; } void setup() override; void dump_config() override; @@ -35,6 +37,7 @@ class HBridgeFan : public Component, public fan::Fan { output::BinaryOutput *oscillating_{nullptr}; int speed_count_{}; DecayMode decay_mode_{DECAY_MODE_SLOW}; + FixedVector preset_modes_{}; void control(const fan::FanCall &call) override; void write_state_(); diff --git a/esphome/components/speed/fan/speed_fan.cpp b/esphome/components/speed/fan/speed_fan.cpp index 03d242178f..081588286f 100644 --- a/esphome/components/speed/fan/speed_fan.cpp +++ b/esphome/components/speed/fan/speed_fan.cpp @@ -15,9 +15,8 @@ void SpeedFan::setup() { } fan::FanTraits SpeedFan::get_traits() { - auto traits = fan::FanTraits(this->oscillating_ != nullptr, true, this->direction_ != nullptr, this->speed_count_); - traits.set_supported_preset_modes(this->preset_modes_); - return traits; + return 
fan::FanTraits(this->oscillating_ != nullptr, true, this->direction_ != nullptr, this->speed_count_, + &this->preset_modes_); } void SpeedFan::dump_config() { LOG_FAN("", "Speed Fan", this); } diff --git a/esphome/components/speed/fan/speed_fan.h b/esphome/components/speed/fan/speed_fan.h index f29a42190e..baf0fe30f0 100644 --- a/esphome/components/speed/fan/speed_fan.h +++ b/esphome/components/speed/fan/speed_fan.h @@ -17,6 +17,7 @@ class SpeedFan : public Component, public fan::Fan { void set_output(output::FloatOutput *output) { this->output_ = output; } void set_oscillating(output::BinaryOutput *oscillating) { this->oscillating_ = oscillating; } void set_direction(output::BinaryOutput *direction) { this->direction_ = direction; } + void set_preset_modes(const std::initializer_list &presets) { this->preset_modes_ = presets; } fan::FanTraits get_traits() override; protected: @@ -27,6 +28,7 @@ class SpeedFan : public Component, public fan::Fan { output::BinaryOutput *oscillating_{nullptr}; output::BinaryOutput *direction_{nullptr}; int speed_count_{}; + FixedVector preset_modes_{}; }; } // namespace speed diff --git a/esphome/components/template/fan/template_fan.cpp b/esphome/components/template/fan/template_fan.cpp index 39e853fdb6..94891e6a72 100644 --- a/esphome/components/template/fan/template_fan.cpp +++ b/esphome/components/template/fan/template_fan.cpp @@ -14,10 +14,8 @@ void TemplateFan::setup() { } fan::FanTraits TemplateFan::get_traits() { - auto traits = - fan::FanTraits(this->has_oscillating_, this->speed_count_ > 0, this->has_direction_, this->speed_count_); - traits.set_supported_preset_modes(this->preset_modes_); - return traits; + return fan::FanTraits(this->has_oscillating_, this->speed_count_ > 0, this->has_direction_, this->speed_count_, + &this->preset_modes_); } void TemplateFan::dump_config() { LOG_FAN("", "Template Fan", this); } diff --git a/esphome/components/template/fan/template_fan.h b/esphome/components/template/fan/template_fan.h index 561c2de756..affb313a2e 100644 --- a/esphome/components/template/fan/template_fan.h +++ b/esphome/components/template/fan/template_fan.h @@ -15,6 +15,7 @@ class TemplateFan : public Component, public fan::Fan { void set_has_direction(bool has_direction) { this->has_direction_ = has_direction; } void set_has_oscillating(bool has_oscillating) { this->has_oscillating_ = has_oscillating; } void set_speed_count(int count) { this->speed_count_ = count; } + void set_preset_modes(const std::initializer_list &presets) { this->preset_modes_ = presets; } fan::FanTraits get_traits() override; protected: @@ -23,6 +24,7 @@ class TemplateFan : public Component, public fan::Fan { bool has_oscillating_{false}; bool has_direction_{false}; int speed_count_{0}; + FixedVector preset_modes_{}; }; } // namespace template_ From 274c0505f7753a4baa7cfc4c84fc187e4b1e3f55 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 11:38:52 -1000 Subject: [PATCH 313/336] fixed --- esphome/components/fan/fan_traits.h | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/esphome/components/fan/fan_traits.h b/esphome/components/fan/fan_traits.h index 4b0113c451..8a25c287ab 100644 --- a/esphome/components/fan/fan_traits.h +++ b/esphome/components/fan/fan_traits.h @@ -1,8 +1,7 @@ -#include -#include "esphome/core/helpers.h" - #pragma once +#include "esphome/core/helpers.h" + namespace esphome { #ifdef USE_API @@ -43,14 +42,11 @@ class FanTraits { void set_direction(bool direction) { this->direction_ = direction; } /// Return the preset modes supported by the fan. const FixedVector &supported_preset_modes() const { return *this->preset_modes_; } - /// Set the preset modes pointer (points to parent Fan's preset_modes_) - void set_supported_preset_modes(const FixedVector *preset_modes) { this->preset_modes_ = preset_modes; } /// Return if preset modes are supported bool supports_preset_modes() const { return !this->preset_modes_->empty(); } protected: #ifdef USE_API - // The API connection is a friend class to access internal methods friend class api::APIConnection; // This method returns a reference to the internal preset modes. // It is used by the API to avoid copying data when encoding messages. From 43bcd98649efc09fc8b70d469ea58f1065699384 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 11:41:15 -1000 Subject: [PATCH 314/336] fixed --- esphome/components/fan/fan_traits.h | 2 ++ esphome/components/hbridge/fan/hbridge_fan.h | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/esphome/components/fan/fan_traits.h b/esphome/components/fan/fan_traits.h index 8a25c287ab..4c10ccd10a 100644 --- a/esphome/components/fan/fan_traits.h +++ b/esphome/components/fan/fan_traits.h @@ -42,6 +42,8 @@ class FanTraits { void set_direction(bool direction) { this->direction_ = direction; } /// Return the preset modes supported by the fan. const FixedVector &supported_preset_modes() const { return *this->preset_modes_; } + /// Set the preset modes supported by the fan. + void set_supported_preset_modes(const FixedVector *preset_modes) { this->preset_modes_ = preset_modes; } /// Return if preset modes are supported bool supports_preset_modes() const { return !this->preset_modes_->empty(); } diff --git a/esphome/components/hbridge/fan/hbridge_fan.h b/esphome/components/hbridge/fan/hbridge_fan.h index 847eca6166..e4b075f759 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.h +++ b/esphome/components/hbridge/fan/hbridge_fan.h @@ -21,7 +21,6 @@ class HBridgeFan : public Component, public fan::Fan { void set_pin_a(output::FloatOutput *pin_a) { pin_a_ = pin_a; } void set_pin_b(output::FloatOutput *pin_b) { pin_b_ = pin_b; } void set_enable_pin(output::FloatOutput *enable) { enable_ = enable; } - void set_oscillating(output::BinaryOutput *oscillating) { this->oscillating_ = oscillating; } void set_preset_modes(const std::initializer_list &presets) { this->preset_modes_ = presets; } void setup() override; From fdb23a2c1371bd0c5ca93605d9a0d5f6e4fa3d04 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 11:42:31 -1000 Subject: [PATCH 315/336] fixed --- esphome/components/copy/fan/copy_fan.cpp | 2 +- esphome/components/copy/fan/copy_fan.h | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/esphome/components/copy/fan/copy_fan.cpp b/esphome/components/copy/fan/copy_fan.cpp index e2b4c24dd8..cf5341531a 100644 --- a/esphome/components/copy/fan/copy_fan.cpp +++ b/esphome/components/copy/fan/copy_fan.cpp @@ -35,7 +35,7 @@ fan::FanTraits CopyFan::get_traits() { traits.set_speed(base.supports_speed()); traits.set_supported_speed_count(base.supported_speed_count()); traits.set_direction(base.supports_direction()); - traits.set_supported_preset_modes(&source_->preset_modes_); + traits.set_supported_preset_modes(&base.supported_preset_modes()); return traits; } diff --git a/esphome/components/copy/fan/copy_fan.h b/esphome/components/copy/fan/copy_fan.h index e1212537f1..b474975bc4 100644 --- a/esphome/components/copy/fan/copy_fan.h +++ b/esphome/components/copy/fan/copy_fan.h @@ -16,6 +16,7 @@ class CopyFan : public fan::Fan, public Component { protected: void control(const fan::FanCall &call) override; + ; fan::Fan *source_; }; From 5c7029623e6d728441bf655609ff0394ef7209e1 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 11:44:42 -1000 Subject: [PATCH 316/336] fixed --- esphome/components/fan/fan.cpp | 14 ++++++++------ esphome/components/fan/fan_traits.h | 2 +- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/esphome/components/fan/fan.cpp b/esphome/components/fan/fan.cpp index 26a61de0b1..856152de63 100644 --- a/esphome/components/fan/fan.cpp +++ b/esphome/components/fan/fan.cpp @@ -50,13 +50,15 @@ void FanCall::validate_() { } if (!this->preset_mode_.empty()) { - const auto &preset_modes = traits.supported_preset_modes(); - // Linear search is efficient for small preset mode lists (typically 2-5 items) bool found = false; - for (const auto &mode : preset_modes) { - if (mode == this->preset_mode_) { - found = true; - break; + if (traits.supports_preset_modes()) { + const auto &preset_modes = traits.supported_preset_modes(); + // Linear search is efficient for small preset mode lists (typically 2-5 items) + for (const auto &mode : preset_modes) { + if (mode == this->preset_mode_) { + found = true; + break; + } } } if (!found) { diff --git a/esphome/components/fan/fan_traits.h b/esphome/components/fan/fan_traits.h index 4c10ccd10a..138d39bb65 100644 --- a/esphome/components/fan/fan_traits.h +++ b/esphome/components/fan/fan_traits.h @@ -45,7 +45,7 @@ class FanTraits { /// Set the preset modes supported by the fan. void set_supported_preset_modes(const FixedVector *preset_modes) { this->preset_modes_ = preset_modes; } /// Return if preset modes are supported - bool supports_preset_modes() const { return !this->preset_modes_->empty(); } + bool supports_preset_modes() const { return this->preset_modes_ != nullptr && !this->preset_modes_->empty(); } protected: #ifdef USE_API From b0f764a37e15fc5f5ddbd973ee16b5166edce466 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 11:52:15 -1000 Subject: [PATCH 317/336] fixed --- esphome/components/api/api_connection.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index 7c135946f8..cb480ce51a 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -423,7 +423,8 @@ uint16_t APIConnection::try_send_fan_info(EntityBase *entity, APIConnection *con msg.supports_speed = traits.supports_speed(); msg.supports_direction = traits.supports_direction(); msg.supported_speed_count = traits.supported_speed_count(); - msg.supported_preset_modes = &traits.supported_preset_modes_for_api_(); + if (traits.supports_preset_modes()) + msg.supported_preset_modes = &traits.supported_preset_modes_for_api_(); return fill_and_encode_entity_info(fan, msg, ListEntitiesFanResponse::MESSAGE_TYPE, conn, remaining_size, is_single); } void APIConnection::fan_command(const FanCommandRequest &msg) { From 7f567bdfbe172b3fc0c60a7bd5b1d6eec4104746 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 11:53:15 -1000 Subject: [PATCH 318/336] [fan] Add basic fan compile tests (#11484) --- tests/components/fan/common.yaml | 11 +++++++++++ tests/components/fan/test.esp8266-ard.yaml | 1 + 2 files changed, 12 insertions(+) create mode 100644 tests/components/fan/common.yaml create mode 100644 tests/components/fan/test.esp8266-ard.yaml diff --git a/tests/components/fan/common.yaml b/tests/components/fan/common.yaml new file mode 100644 index 0000000000..55c2a656fd --- /dev/null +++ b/tests/components/fan/common.yaml @@ -0,0 +1,11 @@ +fan: + - platform: template + id: test_fan + name: "Test Fan" + preset_modes: + - Eco + - Sleep + - Turbo + has_oscillating: true + has_direction: true + speed_count: 3 diff --git a/tests/components/fan/test.esp8266-ard.yaml b/tests/components/fan/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/fan/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml From 26e47546737b78287359b1ebbbd8c23529792b71 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 12:02:20 -1000 Subject: [PATCH 319/336] fixed --- esphome/components/api/api_connection.cpp | 2 +- esphome/components/fan/fan_traits.h | 15 --------------- 2 files changed, 1 insertion(+), 16 deletions(-) diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index cb480ce51a..970b6d29f4 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -424,7 +424,7 @@ uint16_t APIConnection::try_send_fan_info(EntityBase *entity, APIConnection *con msg.supports_direction = traits.supports_direction(); msg.supported_speed_count = traits.supported_speed_count(); if (traits.supports_preset_modes()) - msg.supported_preset_modes = &traits.supported_preset_modes_for_api_(); + msg.supported_preset_modes = &traits.supported_preset_modes(); return fill_and_encode_entity_info(fan, msg, ListEntitiesFanResponse::MESSAGE_TYPE, conn, remaining_size, is_single); } void APIConnection::fan_command(const FanCommandRequest &msg) { diff --git a/esphome/components/fan/fan_traits.h b/esphome/components/fan/fan_traits.h index 138d39bb65..5c2a0eb355 100644 --- a/esphome/components/fan/fan_traits.h +++ b/esphome/components/fan/fan_traits.h @@ -3,13 +3,6 @@ #include "esphome/core/helpers.h" namespace esphome { - -#ifdef USE_API -namespace api { -class APIConnection; -} // namespace api -#endif - namespace fan { class FanTraits { @@ -48,14 +41,6 @@ class FanTraits { bool supports_preset_modes() const { return this->preset_modes_ != nullptr && !this->preset_modes_->empty(); } protected: -#ifdef USE_API - friend class api::APIConnection; - // This method returns a reference to the internal preset modes. - // It is used by the API to avoid copying data when encoding messages. - // Warning: Do not use this method outside of the API connection code. - // It returns a reference to internal data that can be invalidated. - const FixedVector &supported_preset_modes_for_api_() const { return *this->preset_modes_; } -#endif bool oscillation_{false}; bool speed_{false}; bool direction_{false}; From 77f97270d671b6b63a9568db2ca908b0bcfc0c7b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 12:20:50 -1000 Subject: [PATCH 320/336] [light] Use std::initializer_list for add_effects to reduce flash overhead --- esphome/components/light/light_state.cpp | 7 ++----- esphome/components/light/light_state.h | 2 +- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/esphome/components/light/light_state.cpp b/esphome/components/light/light_state.cpp index 979dc2f5a1..7b0a698bb8 100644 --- a/esphome/components/light/light_state.cpp +++ b/esphome/components/light/light_state.cpp @@ -178,12 +178,9 @@ void LightState::set_restore_mode(LightRestoreMode restore_mode) { this->restore void LightState::set_initial_state(const LightStateRTCState &initial_state) { this->initial_state_ = initial_state; } bool LightState::supports_effects() { return !this->effects_.empty(); } const FixedVector &LightState::get_effects() const { return this->effects_; } -void LightState::add_effects(const std::vector &effects) { +void LightState::add_effects(const std::initializer_list &effects) { // Called once from Python codegen during setup with all effects from YAML config - this->effects_.init(effects.size()); - for (auto *effect : effects) { - this->effects_.push_back(effect); - } + this->effects_ = effects; } void LightState::current_values_as_binary(bool *binary) { this->current_values.as_binary(binary); } diff --git a/esphome/components/light/light_state.h b/esphome/components/light/light_state.h index a07aeb6ae5..04449e790d 100644 --- a/esphome/components/light/light_state.h +++ b/esphome/components/light/light_state.h @@ -163,7 +163,7 @@ class LightState : public EntityBase, public Component { const FixedVector &get_effects() const; /// Add effects for this light state. - void add_effects(const std::vector &effects); + void add_effects(const std::initializer_list &effects); /// Get the total number of effects available for this light. size_t get_effect_count() const { return this->effects_.size(); } From 6d1ee107426a38206e118a6818d1d937524f9725 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 12:24:47 -1000 Subject: [PATCH 321/336] manual copy --- esphome/components/copy/fan/copy_fan.cpp | 12 +++++++++++- esphome/components/copy/fan/copy_fan.h | 2 ++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/esphome/components/copy/fan/copy_fan.cpp b/esphome/components/copy/fan/copy_fan.cpp index cf5341531a..b939338b24 100644 --- a/esphome/components/copy/fan/copy_fan.cpp +++ b/esphome/components/copy/fan/copy_fan.cpp @@ -35,7 +35,17 @@ fan::FanTraits CopyFan::get_traits() { traits.set_speed(base.supports_speed()); traits.set_supported_speed_count(base.supported_speed_count()); traits.set_direction(base.supports_direction()); - traits.set_supported_preset_modes(&base.supported_preset_modes()); + + // Copy preset modes from source to avoid dangling pointer to temporary + if (base.supports_preset_modes()) { + const auto &source_modes = base.supported_preset_modes(); + this->preset_modes_.clear(); + for (const auto &mode : source_modes) { + this->preset_modes_.push_back(mode); + } + traits.set_supported_preset_modes(&this->preset_modes_); + } + return traits; } diff --git a/esphome/components/copy/fan/copy_fan.h b/esphome/components/copy/fan/copy_fan.h index b474975bc4..78134c6890 100644 --- a/esphome/components/copy/fan/copy_fan.h +++ b/esphome/components/copy/fan/copy_fan.h @@ -1,6 +1,7 @@ #pragma once #include "esphome/core/component.h" +#include "esphome/core/helpers.h" #include "esphome/components/fan/fan.h" namespace esphome { @@ -19,6 +20,7 @@ class CopyFan : public fan::Fan, public Component { ; fan::Fan *source_; + FixedVector preset_modes_{}; }; } // namespace copy From c69e7f4e78655112278f1108aacafbff5288c440 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 12:25:35 -1000 Subject: [PATCH 322/336] init --- esphome/components/copy/fan/copy_fan.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/copy/fan/copy_fan.cpp b/esphome/components/copy/fan/copy_fan.cpp index b939338b24..9ec4d8f973 100644 --- a/esphome/components/copy/fan/copy_fan.cpp +++ b/esphome/components/copy/fan/copy_fan.cpp @@ -39,7 +39,7 @@ fan::FanTraits CopyFan::get_traits() { // Copy preset modes from source to avoid dangling pointer to temporary if (base.supports_preset_modes()) { const auto &source_modes = base.supported_preset_modes(); - this->preset_modes_.clear(); + this->preset_modes_.init(source_modes.size()); for (const auto &mode : source_modes) { this->preset_modes_.push_back(mode); } From c7aef0016a279d0e7f3b838f560baf1c9c53e1c5 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 12:27:29 -1000 Subject: [PATCH 323/336] manual copy --- esphome/components/fan/fan.cpp | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/esphome/components/fan/fan.cpp b/esphome/components/fan/fan.cpp index 856152de63..774cf59e23 100644 --- a/esphome/components/fan/fan.cpp +++ b/esphome/components/fan/fan.cpp @@ -102,9 +102,10 @@ FanCall FanRestoreState::to_call(Fan &fan) { call.set_speed(this->speed); call.set_direction(this->direction); - if (fan.get_traits().supports_preset_modes()) { + auto traits = fan.get_traits(); + if (traits.supports_preset_modes()) { // Use stored preset index to get preset name - const auto &preset_modes = fan.get_traits().supported_preset_modes(); + const auto &preset_modes = traits.supported_preset_modes(); if (this->preset_mode < preset_modes.size()) { call.set_preset_mode(preset_modes[this->preset_mode]); } @@ -117,9 +118,10 @@ void FanRestoreState::apply(Fan &fan) { fan.speed = this->speed; fan.direction = this->direction; - if (fan.get_traits().supports_preset_modes()) { + auto traits = fan.get_traits(); + if (traits.supports_preset_modes()) { // Use stored preset index to get preset name - const auto &preset_modes = fan.get_traits().supported_preset_modes(); + const auto &preset_modes = traits.supported_preset_modes(); if (this->preset_mode < preset_modes.size()) { fan.preset_mode = preset_modes[this->preset_mode]; } @@ -198,8 +200,9 @@ void Fan::save_state_() { state.speed = this->speed; state.direction = this->direction; - if (this->get_traits().supports_preset_modes() && !this->preset_mode.empty()) { - const auto &preset_modes = this->get_traits().supported_preset_modes(); + auto traits = this->get_traits(); + if (traits.supports_preset_modes() && !this->preset_mode.empty()) { + const auto &preset_modes = traits.supported_preset_modes(); // Store index of current preset mode - linear search is efficient for small lists for (size_t i = 0; i < preset_modes.size(); i++) { if (preset_modes[i] == this->preset_mode) { From fe6f87718581a78f2608c0cd82e071e9b7a0fe94 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 12:28:51 -1000 Subject: [PATCH 324/336] manual copy --- esphome/components/fan/automation.h | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/esphome/components/fan/automation.h b/esphome/components/fan/automation.h index 90661c307c..cf04362477 100644 --- a/esphome/components/fan/automation.h +++ b/esphome/components/fan/automation.h @@ -58,10 +58,11 @@ template class CycleSpeedAction : public Action { void play(Ts... x) override { // check to see if fan supports speeds and is on - if (this->state_->get_traits().supported_speed_count()) { + auto traits = this->state_->get_traits(); + if (traits.supported_speed_count()) { if (this->state_->state) { int speed = this->state_->speed + 1; - int supported_speed_count = this->state_->get_traits().supported_speed_count(); + int supported_speed_count = traits.supported_speed_count(); bool off_speed_cycle = no_off_cycle_.value(x...); if (speed > supported_speed_count && off_speed_cycle) { // was running at max speed, off speed cycle enabled, so turn off From 977dd9dd340bcc71166a5d4f691354fdaaf3584e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 12:29:23 -1000 Subject: [PATCH 325/336] manual copy --- esphome/components/web_server/web_server.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esphome/components/web_server/web_server.cpp b/esphome/components/web_server/web_server.cpp index 1d08ef5a35..e84bb67aba 100644 --- a/esphome/components/web_server/web_server.cpp +++ b/esphome/components/web_server/web_server.cpp @@ -723,7 +723,7 @@ std::string WebServer::fan_json(fan::Fan *obj, JsonDetail start_config) { root["speed_level"] = obj->speed; root["speed_count"] = traits.supported_speed_count(); } - if (obj->get_traits().supports_oscillation()) + if (traits.supports_oscillation()) root["oscillation"] = obj->oscillating; if (start_config == DETAIL_ALL) { this->add_sorting_info_(root, obj); From 93c555ae873dbd29ac29f18ace4615fef6c482eb Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 13:18:14 -1000 Subject: [PATCH 326/336] reset --- esphome/components/api/api.proto | 2 +- esphome/components/api/api_connection.cpp | 3 +- esphome/components/api/api_pb2.h | 2 +- esphome/components/copy/fan/copy_fan.cpp | 12 +---- esphome/components/copy/fan/copy_fan.h | 2 - esphome/components/fan/automation.h | 5 +-- esphome/components/fan/fan.cpp | 44 ++++++------------- esphome/components/fan/fan_traits.h | 35 ++++++++++----- .../components/hbridge/fan/hbridge_fan.cpp | 6 +-- esphome/components/hbridge/fan/hbridge_fan.h | 10 +++-- esphome/components/speed/fan/speed_fan.cpp | 7 ++- esphome/components/speed/fan/speed_fan.h | 10 +++-- .../components/template/fan/template_fan.cpp | 8 ++-- .../components/template/fan/template_fan.h | 10 +++-- esphome/components/web_server/web_server.cpp | 2 +- 15 files changed, 72 insertions(+), 86 deletions(-) diff --git a/esphome/components/api/api.proto b/esphome/components/api/api.proto index 34be6e4aa2..d202486cfa 100644 --- a/esphome/components/api/api.proto +++ b/esphome/components/api/api.proto @@ -425,7 +425,7 @@ message ListEntitiesFanResponse { bool disabled_by_default = 9; string icon = 10 [(field_ifdef) = "USE_ENTITY_ICON"]; EntityCategory entity_category = 11; - repeated string supported_preset_modes = 12 [(container_pointer) = "FixedVector"]; + repeated string supported_preset_modes = 12 [(container_pointer) = "std::set"]; uint32 device_id = 13 [(field_ifdef) = "USE_DEVICES"]; } // Deprecated in API version 1.6 - only used in deprecated fields diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index 970b6d29f4..7c135946f8 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -423,8 +423,7 @@ uint16_t APIConnection::try_send_fan_info(EntityBase *entity, APIConnection *con msg.supports_speed = traits.supports_speed(); msg.supports_direction = traits.supports_direction(); msg.supported_speed_count = traits.supported_speed_count(); - if (traits.supports_preset_modes()) - msg.supported_preset_modes = &traits.supported_preset_modes(); + msg.supported_preset_modes = &traits.supported_preset_modes_for_api_(); return fill_and_encode_entity_info(fan, msg, ListEntitiesFanResponse::MESSAGE_TYPE, conn, remaining_size, is_single); } void APIConnection::fan_command(const FanCommandRequest &msg) { diff --git a/esphome/components/api/api_pb2.h b/esphome/components/api/api_pb2.h index 647dd47b89..ed49498176 100644 --- a/esphome/components/api/api_pb2.h +++ b/esphome/components/api/api_pb2.h @@ -725,7 +725,7 @@ class ListEntitiesFanResponse final : 
public InfoResponseProtoMessage { bool supports_speed{false}; bool supports_direction{false}; int32_t supported_speed_count{0}; - const FixedVector *supported_preset_modes{}; + const std::set *supported_preset_modes{}; void encode(ProtoWriteBuffer buffer) const override; void calculate_size(ProtoSize &size) const override; #ifdef HAS_PROTO_MESSAGE_DUMP diff --git a/esphome/components/copy/fan/copy_fan.cpp b/esphome/components/copy/fan/copy_fan.cpp index 9ec4d8f973..15a7f5e025 100644 --- a/esphome/components/copy/fan/copy_fan.cpp +++ b/esphome/components/copy/fan/copy_fan.cpp @@ -35,17 +35,7 @@ fan::FanTraits CopyFan::get_traits() { traits.set_speed(base.supports_speed()); traits.set_supported_speed_count(base.supported_speed_count()); traits.set_direction(base.supports_direction()); - - // Copy preset modes from source to avoid dangling pointer to temporary - if (base.supports_preset_modes()) { - const auto &source_modes = base.supported_preset_modes(); - this->preset_modes_.init(source_modes.size()); - for (const auto &mode : source_modes) { - this->preset_modes_.push_back(mode); - } - traits.set_supported_preset_modes(&this->preset_modes_); - } - + traits.set_supported_preset_modes(base.supported_preset_modes()); return traits; } diff --git a/esphome/components/copy/fan/copy_fan.h b/esphome/components/copy/fan/copy_fan.h index 78134c6890..b474975bc4 100644 --- a/esphome/components/copy/fan/copy_fan.h +++ b/esphome/components/copy/fan/copy_fan.h @@ -1,7 +1,6 @@ #pragma once #include "esphome/core/component.h" -#include "esphome/core/helpers.h" #include "esphome/components/fan/fan.h" namespace esphome { @@ -20,7 +19,6 @@ class CopyFan : public fan::Fan, public Component { ; fan::Fan *source_; - FixedVector preset_modes_{}; }; } // namespace copy diff --git a/esphome/components/fan/automation.h b/esphome/components/fan/automation.h index cf04362477..90661c307c 100644 --- a/esphome/components/fan/automation.h +++ b/esphome/components/fan/automation.h @@ -58,11 +58,10 @@ template class CycleSpeedAction : public Action { void play(Ts... 
x) override { // check to see if fan supports speeds and is on - auto traits = this->state_->get_traits(); - if (traits.supported_speed_count()) { + if (this->state_->get_traits().supported_speed_count()) { if (this->state_->state) { int speed = this->state_->speed + 1; - int supported_speed_count = traits.supported_speed_count(); + int supported_speed_count = this->state_->get_traits().supported_speed_count(); bool off_speed_cycle = no_off_cycle_.value(x...); if (speed > supported_speed_count && off_speed_cycle) { // was running at max speed, off speed cycle enabled, so turn off diff --git a/esphome/components/fan/fan.cpp b/esphome/components/fan/fan.cpp index 774cf59e23..26065ed644 100644 --- a/esphome/components/fan/fan.cpp +++ b/esphome/components/fan/fan.cpp @@ -50,18 +50,8 @@ void FanCall::validate_() { } if (!this->preset_mode_.empty()) { - bool found = false; - if (traits.supports_preset_modes()) { - const auto &preset_modes = traits.supported_preset_modes(); - // Linear search is efficient for small preset mode lists (typically 2-5 items) - for (const auto &mode : preset_modes) { - if (mode == this->preset_mode_) { - found = true; - break; - } - } - } - if (!found) { + const auto &preset_modes = traits.supported_preset_modes(); + if (preset_modes.find(this->preset_mode_) == preset_modes.end()) { ESP_LOGW(TAG, "%s: Preset mode '%s' not supported", this->parent_.get_name().c_str(), this->preset_mode_.c_str()); this->preset_mode_.clear(); } @@ -102,12 +92,11 @@ FanCall FanRestoreState::to_call(Fan &fan) { call.set_speed(this->speed); call.set_direction(this->direction); - auto traits = fan.get_traits(); - if (traits.supports_preset_modes()) { + if (fan.get_traits().supports_preset_modes()) { // Use stored preset index to get preset name - const auto &preset_modes = traits.supported_preset_modes(); + const auto &preset_modes = fan.get_traits().supported_preset_modes(); if (this->preset_mode < preset_modes.size()) { - call.set_preset_mode(preset_modes[this->preset_mode]); + call.set_preset_mode(*std::next(preset_modes.begin(), this->preset_mode)); } } return call; @@ -118,12 +107,11 @@ void FanRestoreState::apply(Fan &fan) { fan.speed = this->speed; fan.direction = this->direction; - auto traits = fan.get_traits(); - if (traits.supports_preset_modes()) { + if (fan.get_traits().supports_preset_modes()) { // Use stored preset index to get preset name - const auto &preset_modes = traits.supported_preset_modes(); + const auto &preset_modes = fan.get_traits().supported_preset_modes(); if (this->preset_mode < preset_modes.size()) { - fan.preset_mode = preset_modes[this->preset_mode]; + fan.preset_mode = *std::next(preset_modes.begin(), this->preset_mode); } } fan.publish_state(); @@ -200,16 +188,12 @@ void Fan::save_state_() { state.speed = this->speed; state.direction = this->direction; - auto traits = this->get_traits(); - if (traits.supports_preset_modes() && !this->preset_mode.empty()) { - const auto &preset_modes = traits.supported_preset_modes(); - // Store index of current preset mode - linear search is efficient for small lists - for (size_t i = 0; i < preset_modes.size(); i++) { - if (preset_modes[i] == this->preset_mode) { - state.preset_mode = i; - break; - } - } + if (this->get_traits().supports_preset_modes() && !this->preset_mode.empty()) { + const auto &preset_modes = this->get_traits().supported_preset_modes(); + // Store index of current preset mode + auto preset_iterator = preset_modes.find(this->preset_mode); + if (preset_iterator != preset_modes.end()) + state.preset_mode 
= std::distance(preset_modes.begin(), preset_iterator); } this->rtc_.save(&state); diff --git a/esphome/components/fan/fan_traits.h b/esphome/components/fan/fan_traits.h index 5c2a0eb355..48509e5705 100644 --- a/esphome/components/fan/fan_traits.h +++ b/esphome/components/fan/fan_traits.h @@ -1,8 +1,16 @@ +#include +#include + #pragma once -#include "esphome/core/helpers.h" - namespace esphome { + +#ifdef USE_API +namespace api { +class APIConnection; +} // namespace api +#endif + namespace fan { class FanTraits { @@ -10,12 +18,6 @@ class FanTraits { FanTraits() = default; FanTraits(bool oscillation, bool speed, bool direction, int speed_count) : oscillation_(oscillation), speed_(speed), direction_(direction), speed_count_(speed_count) {} - FanTraits(bool oscillation, bool speed, bool direction, int speed_count, const FixedVector *preset_modes) - : oscillation_(oscillation), - speed_(speed), - direction_(direction), - speed_count_(speed_count), - preset_modes_(preset_modes) {} /// Return if this fan supports oscillation. bool supports_oscillation() const { return this->oscillation_; } @@ -34,18 +36,27 @@ class FanTraits { /// Set whether this fan supports changing direction void set_direction(bool direction) { this->direction_ = direction; } /// Return the preset modes supported by the fan. - const FixedVector &supported_preset_modes() const { return *this->preset_modes_; } + std::set supported_preset_modes() const { return this->preset_modes_; } /// Set the preset modes supported by the fan. - void set_supported_preset_modes(const FixedVector *preset_modes) { this->preset_modes_ = preset_modes; } + void set_supported_preset_modes(const std::set &preset_modes) { this->preset_modes_ = preset_modes; } /// Return if preset modes are supported - bool supports_preset_modes() const { return this->preset_modes_ != nullptr && !this->preset_modes_->empty(); } + bool supports_preset_modes() const { return !this->preset_modes_.empty(); } protected: +#ifdef USE_API + // The API connection is a friend class to access internal methods + friend class api::APIConnection; + // This method returns a reference to the internal preset modes set. + // It is used by the API to avoid copying data when encoding messages. + // Warning: Do not use this method outside of the API connection code. + // It returns a reference to internal data that can be invalidated. 
+ const std::set &supported_preset_modes_for_api_() const { return this->preset_modes_; } +#endif bool oscillation_{false}; bool speed_{false}; bool direction_{false}; int speed_count_{}; - const FixedVector *preset_modes_{nullptr}; + std::set preset_modes_{}; }; } // namespace fan diff --git a/esphome/components/hbridge/fan/hbridge_fan.cpp b/esphome/components/hbridge/fan/hbridge_fan.cpp index 6971e11cf6..605a9d4ef3 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.cpp +++ b/esphome/components/hbridge/fan/hbridge_fan.cpp @@ -33,10 +33,10 @@ void HBridgeFan::setup() { restore->apply(*this); this->write_state_(); } -} -fan::FanTraits HBridgeFan::get_traits() { - return fan::FanTraits(this->oscillating_ != nullptr, true, true, this->speed_count_, &this->preset_modes_); + // Construct traits + this->traits_ = fan::FanTraits(this->oscillating_ != nullptr, true, true, this->speed_count_); + this->traits_.set_supported_preset_modes(this->preset_modes_); } void HBridgeFan::dump_config() { diff --git a/esphome/components/hbridge/fan/hbridge_fan.h b/esphome/components/hbridge/fan/hbridge_fan.h index e4b075f759..4234fccae3 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.h +++ b/esphome/components/hbridge/fan/hbridge_fan.h @@ -1,7 +1,8 @@ #pragma once +#include + #include "esphome/core/automation.h" -#include "esphome/core/helpers.h" #include "esphome/components/output/binary_output.h" #include "esphome/components/output/float_output.h" #include "esphome/components/fan/fan.h" @@ -21,11 +22,11 @@ class HBridgeFan : public Component, public fan::Fan { void set_pin_a(output::FloatOutput *pin_a) { pin_a_ = pin_a; } void set_pin_b(output::FloatOutput *pin_b) { pin_b_ = pin_b; } void set_enable_pin(output::FloatOutput *enable) { enable_ = enable; } - void set_preset_modes(const std::initializer_list &presets) { this->preset_modes_ = presets; } + void set_preset_modes(const std::set &presets) { preset_modes_ = presets; } void setup() override; void dump_config() override; - fan::FanTraits get_traits() override; + fan::FanTraits get_traits() override { return this->traits_; } fan::FanCall brake(); @@ -36,7 +37,8 @@ class HBridgeFan : public Component, public fan::Fan { output::BinaryOutput *oscillating_{nullptr}; int speed_count_{}; DecayMode decay_mode_{DECAY_MODE_SLOW}; - FixedVector preset_modes_{}; + fan::FanTraits traits_; + std::set preset_modes_{}; void control(const fan::FanCall &call) override; void write_state_(); diff --git a/esphome/components/speed/fan/speed_fan.cpp b/esphome/components/speed/fan/speed_fan.cpp index 081588286f..57bd795416 100644 --- a/esphome/components/speed/fan/speed_fan.cpp +++ b/esphome/components/speed/fan/speed_fan.cpp @@ -12,11 +12,10 @@ void SpeedFan::setup() { restore->apply(*this); this->write_state_(); } -} -fan::FanTraits SpeedFan::get_traits() { - return fan::FanTraits(this->oscillating_ != nullptr, true, this->direction_ != nullptr, this->speed_count_, - &this->preset_modes_); + // Construct traits + this->traits_ = fan::FanTraits(this->oscillating_ != nullptr, true, this->direction_ != nullptr, this->speed_count_); + this->traits_.set_supported_preset_modes(this->preset_modes_); } void SpeedFan::dump_config() { LOG_FAN("", "Speed Fan", this); } diff --git a/esphome/components/speed/fan/speed_fan.h b/esphome/components/speed/fan/speed_fan.h index baf0fe30f0..6537bce3f6 100644 --- a/esphome/components/speed/fan/speed_fan.h +++ b/esphome/components/speed/fan/speed_fan.h @@ -1,7 +1,8 @@ #pragma once +#include + #include "esphome/core/component.h" 
-#include "esphome/core/helpers.h" #include "esphome/components/output/binary_output.h" #include "esphome/components/output/float_output.h" #include "esphome/components/fan/fan.h" @@ -17,8 +18,8 @@ class SpeedFan : public Component, public fan::Fan { void set_output(output::FloatOutput *output) { this->output_ = output; } void set_oscillating(output::BinaryOutput *oscillating) { this->oscillating_ = oscillating; } void set_direction(output::BinaryOutput *direction) { this->direction_ = direction; } - void set_preset_modes(const std::initializer_list &presets) { this->preset_modes_ = presets; } - fan::FanTraits get_traits() override; + void set_preset_modes(const std::set &presets) { this->preset_modes_ = presets; } + fan::FanTraits get_traits() override { return this->traits_; } protected: void control(const fan::FanCall &call) override; @@ -28,7 +29,8 @@ class SpeedFan : public Component, public fan::Fan { output::BinaryOutput *oscillating_{nullptr}; output::BinaryOutput *direction_{nullptr}; int speed_count_{}; - FixedVector preset_modes_{}; + fan::FanTraits traits_; + std::set preset_modes_{}; }; } // namespace speed diff --git a/esphome/components/template/fan/template_fan.cpp b/esphome/components/template/fan/template_fan.cpp index 94891e6a72..5f4a2ae8f7 100644 --- a/esphome/components/template/fan/template_fan.cpp +++ b/esphome/components/template/fan/template_fan.cpp @@ -11,11 +11,11 @@ void TemplateFan::setup() { if (restore.has_value()) { restore->apply(*this); } -} -fan::FanTraits TemplateFan::get_traits() { - return fan::FanTraits(this->has_oscillating_, this->speed_count_ > 0, this->has_direction_, this->speed_count_, - &this->preset_modes_); + // Construct traits + this->traits_ = + fan::FanTraits(this->has_oscillating_, this->speed_count_ > 0, this->has_direction_, this->speed_count_); + this->traits_.set_supported_preset_modes(this->preset_modes_); } void TemplateFan::dump_config() { LOG_FAN("", "Template Fan", this); } diff --git a/esphome/components/template/fan/template_fan.h b/esphome/components/template/fan/template_fan.h index affb313a2e..7f5305ca48 100644 --- a/esphome/components/template/fan/template_fan.h +++ b/esphome/components/template/fan/template_fan.h @@ -1,7 +1,8 @@ #pragma once +#include + #include "esphome/core/component.h" -#include "esphome/core/helpers.h" #include "esphome/components/fan/fan.h" namespace esphome { @@ -15,8 +16,8 @@ class TemplateFan : public Component, public fan::Fan { void set_has_direction(bool has_direction) { this->has_direction_ = has_direction; } void set_has_oscillating(bool has_oscillating) { this->has_oscillating_ = has_oscillating; } void set_speed_count(int count) { this->speed_count_ = count; } - void set_preset_modes(const std::initializer_list &presets) { this->preset_modes_ = presets; } - fan::FanTraits get_traits() override; + void set_preset_modes(const std::set &presets) { this->preset_modes_ = presets; } + fan::FanTraits get_traits() override { return this->traits_; } protected: void control(const fan::FanCall &call) override; @@ -24,7 +25,8 @@ class TemplateFan : public Component, public fan::Fan { bool has_oscillating_{false}; bool has_direction_{false}; int speed_count_{0}; - FixedVector preset_modes_{}; + fan::FanTraits traits_; + std::set preset_modes_{}; }; } // namespace template_ diff --git a/esphome/components/web_server/web_server.cpp b/esphome/components/web_server/web_server.cpp index e84bb67aba..1d08ef5a35 100644 --- a/esphome/components/web_server/web_server.cpp +++ 
b/esphome/components/web_server/web_server.cpp @@ -723,7 +723,7 @@ std::string WebServer::fan_json(fan::Fan *obj, JsonDetail start_config) { root["speed_level"] = obj->speed; root["speed_count"] = traits.supported_speed_count(); } - if (traits.supports_oscillation()) + if (obj->get_traits().supports_oscillation()) root["oscillation"] = obj->oscillating; if (start_config == DETAIL_ALL) { this->add_sorting_info_(root, obj); From 39b93079e506fac4bfef9ae0f8da19bee7a360f0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 13:26:53 -1000 Subject: [PATCH 327/336] simp --- esphome/components/api/api.proto | 2 +- esphome/components/api/api_pb2.h | 2 +- esphome/components/fan/fan.cpp | 5 +++-- esphome/components/fan/fan_traits.h | 15 +++++++-------- esphome/components/hbridge/fan/hbridge_fan.h | 4 ++-- esphome/components/speed/fan/speed_fan.h | 4 ++-- esphome/components/template/fan/template_fan.h | 6 +++--- 7 files changed, 19 insertions(+), 19 deletions(-) diff --git a/esphome/components/api/api.proto b/esphome/components/api/api.proto index d202486cfa..a4c2557ffe 100644 --- a/esphome/components/api/api.proto +++ b/esphome/components/api/api.proto @@ -425,7 +425,7 @@ message ListEntitiesFanResponse { bool disabled_by_default = 9; string icon = 10 [(field_ifdef) = "USE_ENTITY_ICON"]; EntityCategory entity_category = 11; - repeated string supported_preset_modes = 12 [(container_pointer) = "std::set"]; + repeated string supported_preset_modes = 12 [(container_pointer) = "std::vector"]; uint32 device_id = 13 [(field_ifdef) = "USE_DEVICES"]; } // Deprecated in API version 1.6 - only used in deprecated fields diff --git a/esphome/components/api/api_pb2.h b/esphome/components/api/api_pb2.h index ed49498176..e71ad2c64e 100644 --- a/esphome/components/api/api_pb2.h +++ b/esphome/components/api/api_pb2.h @@ -725,7 +725,7 @@ class ListEntitiesFanResponse final : public InfoResponseProtoMessage { bool supports_speed{false}; bool supports_direction{false}; int32_t supported_speed_count{0}; - const std::set *supported_preset_modes{}; + const std::vector *supported_preset_modes{}; void encode(ProtoWriteBuffer buffer) const override; void calculate_size(ProtoSize &size) const override; #ifdef HAS_PROTO_MESSAGE_DUMP diff --git a/esphome/components/fan/fan.cpp b/esphome/components/fan/fan.cpp index 26065ed644..7fb19f242a 100644 --- a/esphome/components/fan/fan.cpp +++ b/esphome/components/fan/fan.cpp @@ -1,5 +1,6 @@ #include "fan.h" #include "esphome/core/log.h" +#include namespace esphome { namespace fan { @@ -51,7 +52,7 @@ void FanCall::validate_() { if (!this->preset_mode_.empty()) { const auto &preset_modes = traits.supported_preset_modes(); - if (preset_modes.find(this->preset_mode_) == preset_modes.end()) { + if (std::find(preset_modes.begin(), preset_modes.end(), this->preset_mode_) == preset_modes.end()) { ESP_LOGW(TAG, "%s: Preset mode '%s' not supported", this->parent_.get_name().c_str(), this->preset_mode_.c_str()); this->preset_mode_.clear(); } @@ -191,7 +192,7 @@ void Fan::save_state_() { if (this->get_traits().supports_preset_modes() && !this->preset_mode.empty()) { const auto &preset_modes = this->get_traits().supported_preset_modes(); // Store index of current preset mode - auto preset_iterator = preset_modes.find(this->preset_mode); + auto preset_iterator = std::find(preset_modes.begin(), preset_modes.end(), this->preset_mode); if (preset_iterator != preset_modes.end()) state.preset_mode = std::distance(preset_modes.begin(), preset_iterator); } diff --git 
a/esphome/components/fan/fan_traits.h b/esphome/components/fan/fan_traits.h index 48509e5705..15c951b045 100644 --- a/esphome/components/fan/fan_traits.h +++ b/esphome/components/fan/fan_traits.h @@ -1,8 +1,7 @@ -#include -#include - #pragma once +#include + namespace esphome { #ifdef USE_API @@ -36,9 +35,9 @@ class FanTraits { /// Set whether this fan supports changing direction void set_direction(bool direction) { this->direction_ = direction; } /// Return the preset modes supported by the fan. - std::set supported_preset_modes() const { return this->preset_modes_; } + const std::vector &supported_preset_modes() const { return this->preset_modes_; } /// Set the preset modes supported by the fan. - void set_supported_preset_modes(const std::set &preset_modes) { this->preset_modes_ = preset_modes; } + void set_supported_preset_modes(const std::vector &preset_modes) { this->preset_modes_ = preset_modes; } /// Return if preset modes are supported bool supports_preset_modes() const { return !this->preset_modes_.empty(); } @@ -46,17 +45,17 @@ class FanTraits { #ifdef USE_API // The API connection is a friend class to access internal methods friend class api::APIConnection; - // This method returns a reference to the internal preset modes set. + // This method returns a reference to the internal preset modes. // It is used by the API to avoid copying data when encoding messages. // Warning: Do not use this method outside of the API connection code. // It returns a reference to internal data that can be invalidated. - const std::set &supported_preset_modes_for_api_() const { return this->preset_modes_; } + const std::vector &supported_preset_modes_for_api_() const { return this->preset_modes_; } #endif bool oscillation_{false}; bool speed_{false}; bool direction_{false}; int speed_count_{}; - std::set preset_modes_{}; + std::vector preset_modes_{}; }; } // namespace fan diff --git a/esphome/components/hbridge/fan/hbridge_fan.h b/esphome/components/hbridge/fan/hbridge_fan.h index 4234fccae3..b5fb7f5daa 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.h +++ b/esphome/components/hbridge/fan/hbridge_fan.h @@ -22,7 +22,7 @@ class HBridgeFan : public Component, public fan::Fan { void set_pin_a(output::FloatOutput *pin_a) { pin_a_ = pin_a; } void set_pin_b(output::FloatOutput *pin_b) { pin_b_ = pin_b; } void set_enable_pin(output::FloatOutput *enable) { enable_ = enable; } - void set_preset_modes(const std::set &presets) { preset_modes_ = presets; } + void set_preset_modes(const std::vector &presets) { preset_modes_ = presets; } void setup() override; void dump_config() override; @@ -38,7 +38,7 @@ class HBridgeFan : public Component, public fan::Fan { int speed_count_{}; DecayMode decay_mode_{DECAY_MODE_SLOW}; fan::FanTraits traits_; - std::set preset_modes_{}; + std::vector preset_modes_{}; void control(const fan::FanCall &call) override; void write_state_(); diff --git a/esphome/components/speed/fan/speed_fan.h b/esphome/components/speed/fan/speed_fan.h index 6537bce3f6..454b7fc136 100644 --- a/esphome/components/speed/fan/speed_fan.h +++ b/esphome/components/speed/fan/speed_fan.h @@ -18,7 +18,7 @@ class SpeedFan : public Component, public fan::Fan { void set_output(output::FloatOutput *output) { this->output_ = output; } void set_oscillating(output::BinaryOutput *oscillating) { this->oscillating_ = oscillating; } void set_direction(output::BinaryOutput *direction) { this->direction_ = direction; } - void set_preset_modes(const std::set &presets) { this->preset_modes_ = presets; } + void 
set_preset_modes(const std::vector &presets) { this->preset_modes_ = presets; } fan::FanTraits get_traits() override { return this->traits_; } protected: @@ -30,7 +30,7 @@ class SpeedFan : public Component, public fan::Fan { output::BinaryOutput *direction_{nullptr}; int speed_count_{}; fan::FanTraits traits_; - std::set preset_modes_{}; + std::vector preset_modes_{}; }; } // namespace speed diff --git a/esphome/components/template/fan/template_fan.h b/esphome/components/template/fan/template_fan.h index 7f5305ca48..5d780f61f0 100644 --- a/esphome/components/template/fan/template_fan.h +++ b/esphome/components/template/fan/template_fan.h @@ -1,6 +1,6 @@ #pragma once -#include +#include #include "esphome/core/component.h" #include "esphome/components/fan/fan.h" @@ -16,7 +16,7 @@ class TemplateFan : public Component, public fan::Fan { void set_has_direction(bool has_direction) { this->has_direction_ = has_direction; } void set_has_oscillating(bool has_oscillating) { this->has_oscillating_ = has_oscillating; } void set_speed_count(int count) { this->speed_count_ = count; } - void set_preset_modes(const std::set &presets) { this->preset_modes_ = presets; } + void set_preset_modes(const std::initializer_list &presets) { this->preset_modes_ = presets; } fan::FanTraits get_traits() override { return this->traits_; } protected: @@ -26,7 +26,7 @@ class TemplateFan : public Component, public fan::Fan { bool has_direction_{false}; int speed_count_{0}; fan::FanTraits traits_; - std::set preset_modes_{}; + std::vector preset_modes_{}; }; } // namespace template_ From 091c12cb489f8df7aa4fd53f52f86f01891d73b5 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 13:29:14 -1000 Subject: [PATCH 328/336] preen --- esphome/components/fan/fan.cpp | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/esphome/components/fan/fan.cpp b/esphome/components/fan/fan.cpp index 7fb19f242a..cf1ec3d6ae 100644 --- a/esphome/components/fan/fan.cpp +++ b/esphome/components/fan/fan.cpp @@ -1,6 +1,5 @@ #include "fan.h" #include "esphome/core/log.h" -#include namespace esphome { namespace fan { @@ -52,7 +51,14 @@ void FanCall::validate_() { if (!this->preset_mode_.empty()) { const auto &preset_modes = traits.supported_preset_modes(); - if (std::find(preset_modes.begin(), preset_modes.end(), this->preset_mode_) == preset_modes.end()) { + bool found = false; + for (const auto &mode : preset_modes) { + if (mode == this->preset_mode_) { + found = true; + break; + } + } + if (!found) { ESP_LOGW(TAG, "%s: Preset mode '%s' not supported", this->parent_.get_name().c_str(), this->preset_mode_.c_str()); this->preset_mode_.clear(); } @@ -192,9 +198,14 @@ void Fan::save_state_() { if (this->get_traits().supports_preset_modes() && !this->preset_mode.empty()) { const auto &preset_modes = this->get_traits().supported_preset_modes(); // Store index of current preset mode - auto preset_iterator = std::find(preset_modes.begin(), preset_modes.end(), this->preset_mode); - if (preset_iterator != preset_modes.end()) - state.preset_mode = std::distance(preset_modes.begin(), preset_iterator); + size_t i = 0; + for (const auto &mode : preset_modes) { + if (mode == this->preset_mode) { + state.preset_mode = i; + break; + } + i++; + } } this->rtc_.save(&state); From 272858dfcadb27968cec25187ee8539a32fd9238 Mon Sep 17 00:00:00 2001 From: "J. 
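The save-state loop above stores only the index of the active preset, so the RTC record stays one small integer, and restore turns that index back into a name (falling back to no preset if the index no longer fits the configured list). A standalone sketch of that round trip, assuming a toy RestoreRecord and plain std::vector storage:

#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

// Toy restore record: persists an index, not the preset string itself.
struct RestoreRecord {
  uint8_t preset_index{0};
};

static RestoreRecord save_preset(const std::vector<std::string> &modes, const std::string &active) {
  RestoreRecord record;
  size_t i = 0;
  for (const auto &mode : modes) {  // small list, a linear scan is enough
    if (mode == active) {
      record.preset_index = static_cast<uint8_t>(i);
      break;
    }
    i++;
  }
  return record;
}

static std::string restore_preset(const std::vector<std::string> &modes, const RestoreRecord &record) {
  if (record.preset_index < modes.size())
    return modes[record.preset_index];
  return {};  // index out of range (e.g. the config changed), no preset restored
}

int main() {
  const std::vector<std::string> modes{"Eco", "Sleep", "Turbo"};
  RestoreRecord record = save_preset(modes, "Sleep");
  std::printf("saved index %u, restored '%s'\n", static_cast<unsigned>(record.preset_index),
              restore_preset(modes, record).c_str());
  return 0;
}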
Nick Koston" Date: Wed, 22 Oct 2025 13:48:23 -1000 Subject: [PATCH 329/336] [light] Store effect names in flash (const char*) to save RAM --- esphome/components/e131/e131.cpp | 8 ++++---- .../light/addressable_light_effect.h | 19 +++++++++---------- esphome/components/light/base_light_effects.h | 12 ++++++------ esphome/components/light/light_call.cpp | 4 ++-- esphome/components/light/light_effect.h | 8 +++----- esphome/components/light/light_state.h | 2 +- 6 files changed, 25 insertions(+), 28 deletions(-) diff --git a/esphome/components/e131/e131.cpp b/esphome/components/e131/e131.cpp index a74fc9be4a..d18d945cec 100644 --- a/esphome/components/e131/e131.cpp +++ b/esphome/components/e131/e131.cpp @@ -80,8 +80,8 @@ void E131Component::add_effect(E131AddressableLightEffect *light_effect) { return; } - ESP_LOGD(TAG, "Registering '%s' for universes %d-%d.", light_effect->get_name().c_str(), - light_effect->get_first_universe(), light_effect->get_last_universe()); + ESP_LOGD(TAG, "Registering '%s' for universes %d-%d.", light_effect->get_name(), light_effect->get_first_universe(), + light_effect->get_last_universe()); light_effects_.insert(light_effect); @@ -95,8 +95,8 @@ void E131Component::remove_effect(E131AddressableLightEffect *light_effect) { return; } - ESP_LOGD(TAG, "Unregistering '%s' for universes %d-%d.", light_effect->get_name().c_str(), - light_effect->get_first_universe(), light_effect->get_last_universe()); + ESP_LOGD(TAG, "Unregistering '%s' for universes %d-%d.", light_effect->get_name(), light_effect->get_first_universe(), + light_effect->get_last_universe()); light_effects_.erase(light_effect); diff --git a/esphome/components/light/addressable_light_effect.h b/esphome/components/light/addressable_light_effect.h index 9caccad634..9840112040 100644 --- a/esphome/components/light/addressable_light_effect.h +++ b/esphome/components/light/addressable_light_effect.h @@ -30,7 +30,7 @@ inline static uint8_t half_sin8(uint8_t v) { return sin16_c(uint16_t(v) * 128u) class AddressableLightEffect : public LightEffect { public: - explicit AddressableLightEffect(const std::string &name) : LightEffect(name) {} + explicit AddressableLightEffect(const char *name) : LightEffect(name) {} void start_internal() override { this->get_addressable_()->set_effect_active(true); this->get_addressable_()->clear_effect_data(); @@ -57,8 +57,7 @@ class AddressableLightEffect : public LightEffect { class AddressableLambdaLightEffect : public AddressableLightEffect { public: - AddressableLambdaLightEffect(const std::string &name, - std::function f, + AddressableLambdaLightEffect(const char *name, std::function f, uint32_t update_interval) : AddressableLightEffect(name), f_(std::move(f)), update_interval_(update_interval) {} void start() override { this->initial_run_ = true; } @@ -81,7 +80,7 @@ class AddressableLambdaLightEffect : public AddressableLightEffect { class AddressableRainbowLightEffect : public AddressableLightEffect { public: - explicit AddressableRainbowLightEffect(const std::string &name) : AddressableLightEffect(name) {} + explicit AddressableRainbowLightEffect(const char *name) : AddressableLightEffect(name) {} void apply(AddressableLight &it, const Color ¤t_color) override { ESPHSVColor hsv; hsv.value = 255; @@ -112,7 +111,7 @@ struct AddressableColorWipeEffectColor { class AddressableColorWipeEffect : public AddressableLightEffect { public: - explicit AddressableColorWipeEffect(const std::string &name) : AddressableLightEffect(name) {} + explicit AddressableColorWipeEffect(const char 
*name) : AddressableLightEffect(name) {} void set_colors(const std::initializer_list &colors) { this->colors_ = colors; } void set_add_led_interval(uint32_t add_led_interval) { this->add_led_interval_ = add_led_interval; } void set_reverse(bool reverse) { this->reverse_ = reverse; } @@ -165,7 +164,7 @@ class AddressableColorWipeEffect : public AddressableLightEffect { class AddressableScanEffect : public AddressableLightEffect { public: - explicit AddressableScanEffect(const std::string &name) : AddressableLightEffect(name) {} + explicit AddressableScanEffect(const char *name) : AddressableLightEffect(name) {} void set_move_interval(uint32_t move_interval) { this->move_interval_ = move_interval; } void set_scan_width(uint32_t scan_width) { this->scan_width_ = scan_width; } void apply(AddressableLight &it, const Color ¤t_color) override { @@ -202,7 +201,7 @@ class AddressableScanEffect : public AddressableLightEffect { class AddressableTwinkleEffect : public AddressableLightEffect { public: - explicit AddressableTwinkleEffect(const std::string &name) : AddressableLightEffect(name) {} + explicit AddressableTwinkleEffect(const char *name) : AddressableLightEffect(name) {} void apply(AddressableLight &addressable, const Color ¤t_color) override { const uint32_t now = millis(); uint8_t pos_add = 0; @@ -244,7 +243,7 @@ class AddressableTwinkleEffect : public AddressableLightEffect { class AddressableRandomTwinkleEffect : public AddressableLightEffect { public: - explicit AddressableRandomTwinkleEffect(const std::string &name) : AddressableLightEffect(name) {} + explicit AddressableRandomTwinkleEffect(const char *name) : AddressableLightEffect(name) {} void apply(AddressableLight &it, const Color ¤t_color) override { const uint32_t now = millis(); uint8_t pos_add = 0; @@ -293,7 +292,7 @@ class AddressableRandomTwinkleEffect : public AddressableLightEffect { class AddressableFireworksEffect : public AddressableLightEffect { public: - explicit AddressableFireworksEffect(const std::string &name) : AddressableLightEffect(name) {} + explicit AddressableFireworksEffect(const char *name) : AddressableLightEffect(name) {} void start() override { auto &it = *this->get_addressable_(); it.all() = Color::BLACK; @@ -342,7 +341,7 @@ class AddressableFireworksEffect : public AddressableLightEffect { class AddressableFlickerEffect : public AddressableLightEffect { public: - explicit AddressableFlickerEffect(const std::string &name) : AddressableLightEffect(name) {} + explicit AddressableFlickerEffect(const char *name) : AddressableLightEffect(name) {} void apply(AddressableLight &it, const Color ¤t_color) override { const uint32_t now = millis(); const uint8_t intensity = this->intensity_; diff --git a/esphome/components/light/base_light_effects.h b/esphome/components/light/base_light_effects.h index c74d19fe14..327c243525 100644 --- a/esphome/components/light/base_light_effects.h +++ b/esphome/components/light/base_light_effects.h @@ -17,7 +17,7 @@ inline static float random_cubic_float() { /// Pulse effect. class PulseLightEffect : public LightEffect { public: - explicit PulseLightEffect(const std::string &name) : LightEffect(name) {} + explicit PulseLightEffect(const char *name) : LightEffect(name) {} void apply() override { const uint32_t now = millis(); @@ -60,7 +60,7 @@ class PulseLightEffect : public LightEffect { /// Random effect. Sets random colors every 10 seconds and slowly transitions between them. 
class RandomLightEffect : public LightEffect { public: - explicit RandomLightEffect(const std::string &name) : LightEffect(name) {} + explicit RandomLightEffect(const char *name) : LightEffect(name) {} void apply() override { const uint32_t now = millis(); @@ -112,7 +112,7 @@ class RandomLightEffect : public LightEffect { class LambdaLightEffect : public LightEffect { public: - LambdaLightEffect(const std::string &name, std::function f, uint32_t update_interval) + LambdaLightEffect(const char *name, std::function f, uint32_t update_interval) : LightEffect(name), f_(std::move(f)), update_interval_(update_interval) {} void start() override { this->initial_run_ = true; } @@ -138,7 +138,7 @@ class LambdaLightEffect : public LightEffect { class AutomationLightEffect : public LightEffect { public: - AutomationLightEffect(const std::string &name) : LightEffect(name) {} + AutomationLightEffect(const char *name) : LightEffect(name) {} void stop() override { this->trig_->stop_action(); } void apply() override { if (!this->trig_->is_action_running()) { @@ -163,7 +163,7 @@ struct StrobeLightEffectColor { class StrobeLightEffect : public LightEffect { public: - explicit StrobeLightEffect(const std::string &name) : LightEffect(name) {} + explicit StrobeLightEffect(const char *name) : LightEffect(name) {} void apply() override { const uint32_t now = millis(); if (now - this->last_switch_ < this->colors_[this->at_color_].duration) @@ -198,7 +198,7 @@ class StrobeLightEffect : public LightEffect { class FlickerLightEffect : public LightEffect { public: - explicit FlickerLightEffect(const std::string &name) : LightEffect(name) {} + explicit FlickerLightEffect(const char *name) : LightEffect(name) {} void apply() override { LightColorValues remote = this->state_->remote_values; diff --git a/esphome/components/light/light_call.cpp b/esphome/components/light/light_call.cpp index af193e1f11..f611baba71 100644 --- a/esphome/components/light/light_call.cpp +++ b/esphome/components/light/light_call.cpp @@ -156,7 +156,7 @@ void LightCall::perform() { if (this->effect_ == 0u) { effect_s = "None"; } else { - effect_s = this->parent_->effects_[this->effect_ - 1]->get_name().c_str(); + effect_s = this->parent_->effects_[this->effect_ - 1]->get_name(); } if (publish) { @@ -511,7 +511,7 @@ LightCall &LightCall::set_effect(const std::string &effect) { for (uint32_t i = 0; i < this->parent_->effects_.size(); i++) { LightEffect *e = this->parent_->effects_[i]; - if (strcasecmp(effect.c_str(), e->get_name().c_str()) == 0) { + if (strcasecmp(effect.c_str(), e->get_name()) == 0) { this->set_effect(i + 1); found = true; break; diff --git a/esphome/components/light/light_effect.h b/esphome/components/light/light_effect.h index dbaf1faf24..7b734c2001 100644 --- a/esphome/components/light/light_effect.h +++ b/esphome/components/light/light_effect.h @@ -1,7 +1,5 @@ #pragma once -#include - #include "esphome/core/component.h" namespace esphome { @@ -11,7 +9,7 @@ class LightState; class LightEffect { public: - explicit LightEffect(std::string name) : name_(std::move(name)) {} + explicit LightEffect(const char *name) : name_(name) {} /// Initialize this LightEffect. Will be called once after creation. virtual void start() {} @@ -24,7 +22,7 @@ class LightEffect { /// Apply this effect. Use the provided state for starting transitions, ... 
virtual void apply() = 0; - const std::string &get_name() { return this->name_; } + const char *get_name() const { return this->name_; } /// Internal method called by the LightState when this light effect is registered in it. virtual void init() {} @@ -47,7 +45,7 @@ class LightEffect { protected: LightState *state_{nullptr}; - std::string name_; + const char *name_; /// Internal method to find this effect's index in the parent light's effect list. uint32_t get_index_in_parent_() const; diff --git a/esphome/components/light/light_state.h b/esphome/components/light/light_state.h index a07aeb6ae5..502a08c635 100644 --- a/esphome/components/light/light_state.h +++ b/esphome/components/light/light_state.h @@ -177,7 +177,7 @@ class LightState : public EntityBase, public Component { return 0; } for (size_t i = 0; i < this->effects_.size(); i++) { - if (strcasecmp(effect_name.c_str(), this->effects_[i]->get_name().c_str()) == 0) { + if (strcasecmp(effect_name.c_str(), this->effects_[i]->get_name()) == 0) { return i + 1; // Effects are 1-indexed in active_effect_index_ } } From c55c0318825e6e91b92ffbeffc80c376ae98d9c9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 22 Oct 2025 13:55:44 -1000 Subject: [PATCH 330/336] missed some --- esphome/components/adalight/adalight_light_effect.cpp | 2 +- esphome/components/adalight/adalight_light_effect.h | 2 +- esphome/components/e131/e131_addressable_light_effect.cpp | 6 +++--- esphome/components/e131/e131_addressable_light_effect.h | 2 +- esphome/components/wled/wled_light_effect.cpp | 2 +- esphome/components/wled/wled_light_effect.h | 2 +- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/esphome/components/adalight/adalight_light_effect.cpp b/esphome/components/adalight/adalight_light_effect.cpp index 35e98d7360..4cf639a01f 100644 --- a/esphome/components/adalight/adalight_light_effect.cpp +++ b/esphome/components/adalight/adalight_light_effect.cpp @@ -9,7 +9,7 @@ static const char *const TAG = "adalight_light_effect"; static const uint32_t ADALIGHT_ACK_INTERVAL = 1000; static const uint32_t ADALIGHT_RECEIVE_TIMEOUT = 1000; -AdalightLightEffect::AdalightLightEffect(const std::string &name) : AddressableLightEffect(name) {} +AdalightLightEffect::AdalightLightEffect(const char *name) : AddressableLightEffect(name) {} void AdalightLightEffect::start() { AddressableLightEffect::start(); diff --git a/esphome/components/adalight/adalight_light_effect.h b/esphome/components/adalight/adalight_light_effect.h index 72faf44269..bb7319c99c 100644 --- a/esphome/components/adalight/adalight_light_effect.h +++ b/esphome/components/adalight/adalight_light_effect.h @@ -11,7 +11,7 @@ namespace adalight { class AdalightLightEffect : public light::AddressableLightEffect, public uart::UARTDevice { public: - AdalightLightEffect(const std::string &name); + AdalightLightEffect(const char *name); void start() override; void stop() override; diff --git a/esphome/components/e131/e131_addressable_light_effect.cpp b/esphome/components/e131/e131_addressable_light_effect.cpp index 4d1f98ab6c..780e181f04 100644 --- a/esphome/components/e131/e131_addressable_light_effect.cpp +++ b/esphome/components/e131/e131_addressable_light_effect.cpp @@ -9,7 +9,7 @@ namespace e131 { static const char *const TAG = "e131_addressable_light_effect"; static const int MAX_DATA_SIZE = (sizeof(E131Packet::values) - 1); -E131AddressableLightEffect::E131AddressableLightEffect(const std::string &name) : AddressableLightEffect(name) {} 
+E131AddressableLightEffect::E131AddressableLightEffect(const char *name) : AddressableLightEffect(name) {}
 
 int E131AddressableLightEffect::get_data_per_universe() const { return get_lights_per_universe() * channels_; }
 
@@ -58,8 +58,8 @@ bool E131AddressableLightEffect::process_(int universe, const E131Packet &packet
       std::min(it->size(), std::min(output_offset + get_lights_per_universe(), output_offset + packet.count - 1));
   auto *input_data = packet.values + 1;
 
-  ESP_LOGV(TAG, "Applying data for '%s' on %d universe, for %" PRId32 "-%d.", get_name().c_str(), universe,
-           output_offset, output_end);
+  ESP_LOGV(TAG, "Applying data for '%s' on %d universe, for %" PRId32 "-%d.", get_name(), universe, output_offset,
+           output_end);
 
   switch (channels_) {
     case E131_MONO:
diff --git a/esphome/components/e131/e131_addressable_light_effect.h b/esphome/components/e131/e131_addressable_light_effect.h
index 17d7bd2829..381e08163b 100644
--- a/esphome/components/e131/e131_addressable_light_effect.h
+++ b/esphome/components/e131/e131_addressable_light_effect.h
@@ -13,7 +13,7 @@ enum E131LightChannels { E131_MONO = 1, E131_RGB = 3, E131_RGBW = 4 };
 
 class E131AddressableLightEffect : public light::AddressableLightEffect {
  public:
-  E131AddressableLightEffect(const std::string &name);
+  E131AddressableLightEffect(const char *name);
 
   void start() override;
   void stop() override;
diff --git a/esphome/components/wled/wled_light_effect.cpp b/esphome/components/wled/wled_light_effect.cpp
index 25577ccc11..d26b7a1750 100644
--- a/esphome/components/wled/wled_light_effect.cpp
+++ b/esphome/components/wled/wled_light_effect.cpp
@@ -28,7 +28,7 @@ const int DEFAULT_BLANK_TIME = 1000;
 
 static const char *const TAG = "wled_light_effect";
 
-WLEDLightEffect::WLEDLightEffect(const std::string &name) : AddressableLightEffect(name) {}
+WLEDLightEffect::WLEDLightEffect(const char *name) : AddressableLightEffect(name) {}
 
 void WLEDLightEffect::start() {
   AddressableLightEffect::start();
diff --git a/esphome/components/wled/wled_light_effect.h b/esphome/components/wled/wled_light_effect.h
index a591e1fd1a..6da5f4e9f9 100644
--- a/esphome/components/wled/wled_light_effect.h
+++ b/esphome/components/wled/wled_light_effect.h
@@ -15,7 +15,7 @@ namespace wled {
 
 class WLEDLightEffect : public light::AddressableLightEffect {
  public:
-  WLEDLightEffect(const std::string &name);
+  WLEDLightEffect(const char *name);
 
   void start() override;
   void stop() override;

From 6a2b305eb2011f5e84c66f92691f45d13fc32fe7 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Nick Koston" Date: Wed, 22 Oct 2025 14:57:32 -1000 Subject: [PATCH 331/336] [ethernet] Add RMII GPIO pin conflict validation --- esphome/components/ethernet/__init__.py | 48 +++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/esphome/components/ethernet/__init__.py b/esphome/components/ethernet/__init__.py index 7384bb26d3..8349df976b 100644 --- a/esphome/components/ethernet/__init__.py +++ b/esphome/components/ethernet/__init__.py @@ -32,6 +32,7 @@ from esphome.const import ( CONF_MISO_PIN, CONF_MODE, CONF_MOSI_PIN, + CONF_NUMBER, CONF_PAGE_ID, CONF_PIN, CONF_POLLING_INTERVAL, @@ -52,12 +53,24 @@ from esphome.core import ( coroutine_with_priority, ) import esphome.final_validate as fv +from esphome.types import ConfigType CONFLICTS_WITH = ["wifi"] DEPENDENCIES = ["esp32"] AUTO_LOAD = ["network"] LOGGER = logging.getLogger(__name__) +# RMII pins that are hardcoded on ESP32 and cannot be changed +# These pins are used by the internal Ethernet MAC when using RMII PHYs +ESP32_RMII_FIXED_PINS = { + 19: "EMAC_TXD0", + 21: "EMAC_TX_EN", + 22: "EMAC_TXD1", + 25: "EMAC_RXD0", + 26: "EMAC_RXD1", + 27: "EMAC_RX_CRS_DV", +} + ethernet_ns = cg.esphome_ns.namespace("ethernet") PHYRegister = ethernet_ns.struct("PHYRegister") CONF_PHY_ADDR = "phy_addr" @@ -383,3 +396,38 @@ async def to_code(config): if CORE.using_arduino: cg.add_library("WiFi", None) + + +def _final_validate_rmii_pins(config: ConfigType) -> None: + """Validate that RMII pins are not used by other components.""" + # Only validate for RMII-based PHYs on ESP32/ESP32P4 + if config[CONF_TYPE] in SPI_ETHERNET_TYPES or config[CONF_TYPE] == "OPENETH": + return # SPI and OPENETH don't use RMII + + variant = get_esp32_variant() + if variant not in (VARIANT_ESP32, VARIANT_ESP32P4): + return # Only ESP32 classic and P4 have RMII + + # Check each RMII pin against the pin registry + for pin_num, pin_function in ESP32_RMII_FIXED_PINS.items(): + # Check if this pin is used by any component + for pin_list in pins.PIN_SCHEMA_REGISTRY.pins_used.values(): + for pin_path, _, pin_config in pin_list: + if pin_config.get(CONF_NUMBER) == pin_num: + # Found a conflict - show helpful error message + component_path = ".".join(str(p) for p in pin_path) + raise cv.Invalid( + f"GPIO{pin_num} is reserved for Ethernet RMII ({pin_function}) and cannot be used. " + f"This pin is hardcoded by ESP-IDF and cannot be changed when using RMII Ethernet PHYs. " + f"Please choose a different GPIO pin for '{component_path}'.", + path=pin_path, + ) + + +def _final_validate(config: ConfigType) -> ConfigType: + """Final validation for Ethernet component.""" + _final_validate_rmii_pins(config) + return config + + +FINAL_VALIDATE_SCHEMA = _final_validate From 64e3e1ef826b92693960a876728aa2331e3283ba Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 15:00:36 -1000 Subject: [PATCH 332/336] preen --- esphome/components/ethernet/__init__.py | 29 +++++++++++++------------ 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/esphome/components/ethernet/__init__.py b/esphome/components/ethernet/__init__.py index 8349df976b..e32f06d059 100644 --- a/esphome/components/ethernet/__init__.py +++ b/esphome/components/ethernet/__init__.py @@ -408,20 +408,21 @@ def _final_validate_rmii_pins(config: ConfigType) -> None: if variant not in (VARIANT_ESP32, VARIANT_ESP32P4): return # Only ESP32 classic and P4 have RMII - # Check each RMII pin against the pin registry - for pin_num, pin_function in ESP32_RMII_FIXED_PINS.items(): - # Check if this pin is used by any component - for pin_list in pins.PIN_SCHEMA_REGISTRY.pins_used.values(): - for pin_path, _, pin_config in pin_list: - if pin_config.get(CONF_NUMBER) == pin_num: - # Found a conflict - show helpful error message - component_path = ".".join(str(p) for p in pin_path) - raise cv.Invalid( - f"GPIO{pin_num} is reserved for Ethernet RMII ({pin_function}) and cannot be used. " - f"This pin is hardcoded by ESP-IDF and cannot be changed when using RMII Ethernet PHYs. " - f"Please choose a different GPIO pin for '{component_path}'.", - path=pin_path, - ) + # Check all used pins against RMII reserved pins + for pin_list in pins.PIN_SCHEMA_REGISTRY.pins_used.values(): + for pin_path, _, pin_config in pin_list: + pin_num = pin_config.get(CONF_NUMBER) + if pin_num not in ESP32_RMII_FIXED_PINS: + continue + # Found a conflict - show helpful error message + pin_function = ESP32_RMII_FIXED_PINS[pin_num] + component_path = ".".join(str(p) for p in pin_path) + raise cv.Invalid( + f"GPIO{pin_num} is reserved for Ethernet RMII ({pin_function}) and cannot be used. " + f"This pin is hardcoded by ESP-IDF and cannot be changed when using RMII Ethernet PHYs. " + f"Please choose a different GPIO pin for '{component_path}'.", + path=pin_path, + ) def _final_validate(config: ConfigType) -> ConfigType: From c6de86bfb14a046577a03e2fadea2f154ee72290 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 15:08:12 -1000 Subject: [PATCH 333/336] tests --- tests/components/ethernet/common-dp83848.yaml | 4 ++-- tests/components/ethernet/common-ip101.yaml | 4 ++-- tests/components/ethernet/common-jl1101.yaml | 4 ++-- tests/components/ethernet/common-ksz8081.yaml | 4 ++-- tests/components/ethernet/common-ksz8081rna.yaml | 4 ++-- tests/components/ethernet/common-lan8670.yaml | 4 ++-- tests/components/ethernet/common-lan8720.yaml | 4 ++-- tests/components/ethernet/common-rtl8201.yaml | 4 ++-- tests/components/ethernet_info/common.yaml | 4 ++-- 9 files changed, 18 insertions(+), 18 deletions(-) diff --git a/tests/components/ethernet/common-dp83848.yaml b/tests/components/ethernet/common-dp83848.yaml index 7cedfeaf08..f9069c5fb9 100644 --- a/tests/components/ethernet/common-dp83848.yaml +++ b/tests/components/ethernet/common-dp83848.yaml @@ -1,12 +1,12 @@ ethernet: type: DP83848 mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet/common-ip101.yaml b/tests/components/ethernet/common-ip101.yaml index 2dece15171..cea7a5cc35 100644 --- a/tests/components/ethernet/common-ip101.yaml +++ b/tests/components/ethernet/common-ip101.yaml @@ -1,12 +1,12 @@ ethernet: type: IP101 mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet/common-jl1101.yaml b/tests/components/ethernet/common-jl1101.yaml index b6ea884102..7b0a2dfdc4 100644 --- a/tests/components/ethernet/common-jl1101.yaml +++ b/tests/components/ethernet/common-jl1101.yaml @@ -1,12 +1,12 @@ ethernet: type: JL1101 mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet/common-ksz8081.yaml b/tests/components/ethernet/common-ksz8081.yaml index f70d42319e..65541832c2 100644 --- a/tests/components/ethernet/common-ksz8081.yaml +++ b/tests/components/ethernet/common-ksz8081.yaml @@ -1,12 +1,12 @@ ethernet: type: KSZ8081 mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet/common-ksz8081rna.yaml b/tests/components/ethernet/common-ksz8081rna.yaml index 18efdae0e1..f04cba15b2 100644 --- a/tests/components/ethernet/common-ksz8081rna.yaml +++ b/tests/components/ethernet/common-ksz8081rna.yaml @@ -1,12 +1,12 @@ ethernet: type: KSZ8081RNA mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet/common-lan8670.yaml b/tests/components/ethernet/common-lan8670.yaml index ec2f24273d..fb751ebd23 100644 --- a/tests/components/ethernet/common-lan8670.yaml +++ b/tests/components/ethernet/common-lan8670.yaml @@ -1,12 +1,12 @@ ethernet: type: LAN8670 mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet/common-lan8720.yaml b/tests/components/ethernet/common-lan8720.yaml index 204c1d9210..838d57df28 
--- a/tests/components/ethernet/common-lan8720.yaml
+++ b/tests/components/ethernet/common-lan8720.yaml
@@ -1,12 +1,12 @@
 ethernet:
   type: LAN8720
   mdc_pin: 23
-  mdio_pin: 25
+  mdio_pin: 32
   clk:
     pin: 0
     mode: CLK_EXT_IN
   phy_addr: 0
-  power_pin: 26
+  power_pin: 33
   manual_ip:
     static_ip: 192.168.178.56
     gateway: 192.168.178.1
diff --git a/tests/components/ethernet/common-rtl8201.yaml b/tests/components/ethernet/common-rtl8201.yaml
index 8b9f2b86f2..0e7cbe73c6 100644
--- a/tests/components/ethernet/common-rtl8201.yaml
+++ b/tests/components/ethernet/common-rtl8201.yaml
@@ -1,12 +1,12 @@
 ethernet:
   type: RTL8201
   mdc_pin: 23
-  mdio_pin: 25
+  mdio_pin: 32
   clk:
     pin: 0
     mode: CLK_EXT_IN
   phy_addr: 0
-  power_pin: 26
+  power_pin: 33
   manual_ip:
     static_ip: 192.168.178.56
     gateway: 192.168.178.1
diff --git a/tests/components/ethernet_info/common.yaml b/tests/components/ethernet_info/common.yaml
index f45f345316..b720521d10 100644
--- a/tests/components/ethernet_info/common.yaml
+++ b/tests/components/ethernet_info/common.yaml
@@ -1,12 +1,12 @@
 ethernet:
   type: LAN8720
   mdc_pin: 23
-  mdio_pin: 25
+  mdio_pin: 32
   clk:
     pin: 0
     mode: CLK_EXT_IN
   phy_addr: 0
-  power_pin: 26
+  power_pin: 33
   manual_ip:
     static_ip: 192.168.178.56
     gateway: 192.168.178.1

From a050ff6ac342ee97a294bd2dc577212a36d37574 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Wed, 22 Oct 2025 15:17:23 -1000
Subject: [PATCH 334/336] preen

---
 esphome/components/ethernet/__init__.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/esphome/components/ethernet/__init__.py b/esphome/components/ethernet/__init__.py
index e32f06d059..af9938678a 100644
--- a/esphome/components/ethernet/__init__.py
+++ b/esphome/components/ethernet/__init__.py
@@ -286,7 +286,7 @@ CONFIG_SCHEMA = cv.All(
 )
 
 
-def _final_validate(config):
+def _final_validate_spi(config):
     if config[CONF_TYPE] not in SPI_ETHERNET_TYPES:
         return
     if spi_configs := fv.full_config.get().get(CONF_SPI):
@@ -305,9 +305,6 @@ def _final_validate(config):
             )
 
 
-FINAL_VALIDATE_SCHEMA = _final_validate
-
-
 def manual_ip(config):
     return cg.StructInitializer(
         ManualIP,
@@ -427,6 +424,7 @@ def _final_validate_rmii_pins(config: ConfigType) -> None:
 
 def _final_validate(config: ConfigType) -> ConfigType:
     """Final validation for Ethernet component."""
+    _final_validate_spi(config)
     _final_validate_rmii_pins(config)
     return config

From 3112c06f1d0d2fd812a61e4b021a0b2efaee53b8 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Nick Koston" Date: Wed, 22 Oct 2025 16:07:46 -1000 Subject: [PATCH 335/336] handle p4 --- esphome/components/ethernet/__init__.py | 47 +++++++++++++++++++------ 1 file changed, 36 insertions(+), 11 deletions(-) diff --git a/esphome/components/ethernet/__init__.py b/esphome/components/ethernet/__init__.py index af9938678a..cbd2f07cae 100644 --- a/esphome/components/ethernet/__init__.py +++ b/esphome/components/ethernet/__init__.py @@ -60,7 +60,7 @@ DEPENDENCIES = ["esp32"] AUTO_LOAD = ["network"] LOGGER = logging.getLogger(__name__) -# RMII pins that are hardcoded on ESP32 and cannot be changed +# RMII pins that are hardcoded on ESP32 classic and cannot be changed # These pins are used by the internal Ethernet MAC when using RMII PHYs ESP32_RMII_FIXED_PINS = { 19: "EMAC_TXD0", @@ -71,6 +71,18 @@ ESP32_RMII_FIXED_PINS = { 27: "EMAC_RX_CRS_DV", } +# RMII default pins for ESP32-P4 +# These are the default pins used by ESP-IDF and are configurable in principle, +# but ESPHome's ethernet component currently has no way to change them +ESP32P4_RMII_DEFAULT_PINS = { + 34: "EMAC_TXD0", + 35: "EMAC_TXD1", + 28: "EMAC_RX_CRS_DV", + 29: "EMAC_RXD0", + 30: "EMAC_RXD1", + 49: "EMAC_TX_EN", +} + ethernet_ns = cg.esphome_ns.namespace("ethernet") PHYRegister = ethernet_ns.struct("PHYRegister") CONF_PHY_ADDR = "phy_addr" @@ -402,24 +414,37 @@ def _final_validate_rmii_pins(config: ConfigType) -> None: return # SPI and OPENETH don't use RMII variant = get_esp32_variant() - if variant not in (VARIANT_ESP32, VARIANT_ESP32P4): - return # Only ESP32 classic and P4 have RMII + if variant == VARIANT_ESP32: + rmii_pins = ESP32_RMII_FIXED_PINS + is_configurable = False + elif variant == VARIANT_ESP32P4: + rmii_pins = ESP32P4_RMII_DEFAULT_PINS + is_configurable = True + else: + return # No RMII validation needed for other variants # Check all used pins against RMII reserved pins for pin_list in pins.PIN_SCHEMA_REGISTRY.pins_used.values(): for pin_path, _, pin_config in pin_list: pin_num = pin_config.get(CONF_NUMBER) - if pin_num not in ESP32_RMII_FIXED_PINS: + if pin_num not in rmii_pins: continue # Found a conflict - show helpful error message - pin_function = ESP32_RMII_FIXED_PINS[pin_num] + pin_function = rmii_pins[pin_num] component_path = ".".join(str(p) for p in pin_path) - raise cv.Invalid( - f"GPIO{pin_num} is reserved for Ethernet RMII ({pin_function}) and cannot be used. " - f"This pin is hardcoded by ESP-IDF and cannot be changed when using RMII Ethernet PHYs. " - f"Please choose a different GPIO pin for '{component_path}'.", - path=pin_path, - ) + if is_configurable: + error_msg = ( + f"GPIO{pin_num} is used by Ethernet RMII ({pin_function}) with the current default configuration. " + f"This conflicts with '{component_path}'. " + f"Please choose a different GPIO pin for '{component_path}'." + ) + else: + error_msg = ( + f"GPIO{pin_num} is reserved for Ethernet RMII ({pin_function}) and cannot be used. " + f"This pin is hardcoded by ESP-IDF and cannot be changed when using RMII Ethernet PHYs. " + f"Please choose a different GPIO pin for '{component_path}'." + ) + raise cv.Invalid(error_msg, path=pin_path) def _final_validate(config: ConfigType) -> ConfigType: From f5b995a454d8138b725d2fe24c05518b8781034b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Oct 2025 16:11:37 -1000 Subject: [PATCH 336/336] preen --- esphome/components/ethernet/__init__.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/esphome/components/ethernet/__init__.py b/esphome/components/ethernet/__init__.py index cbd2f07cae..77f70a3630 100644 --- a/esphome/components/ethernet/__init__.py +++ b/esphome/components/ethernet/__init__.py @@ -434,15 +434,19 @@ def _final_validate_rmii_pins(config: ConfigType) -> None: component_path = ".".join(str(p) for p in pin_path) if is_configurable: error_msg = ( - f"GPIO{pin_num} is used by Ethernet RMII ({pin_function}) with the current default configuration. " - f"This conflicts with '{component_path}'. " - f"Please choose a different GPIO pin for '{component_path}'." + f"GPIO{pin_num} is used by Ethernet RMII " + f"({pin_function}) with the current default " + f"configuration. This conflicts with '{component_path}'. " + f"Please choose a different GPIO pin for " + f"'{component_path}'." ) else: error_msg = ( - f"GPIO{pin_num} is reserved for Ethernet RMII ({pin_function}) and cannot be used. " - f"This pin is hardcoded by ESP-IDF and cannot be changed when using RMII Ethernet PHYs. " - f"Please choose a different GPIO pin for '{component_path}'." + f"GPIO{pin_num} is reserved for Ethernet RMII " + f"({pin_function}) and cannot be used. This pin is " + f"hardcoded by ESP-IDF and cannot be changed when using " + f"RMII Ethernet PHYs. Please choose a different GPIO pin " + f"for '{component_path}'." ) raise cv.Invalid(error_msg, path=pin_path)