mirror of
				https://github.com/esphome/esphome.git
				synced 2025-11-04 09:01:49 +00:00 
			
		
		
		
	Compare commits
	
		
			37 Commits
		
	
	
		
			2025.10.3
			...
			wifi_fixed
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 
						 | 
					347501d895 | ||
| 
						 | 
					4c00861760 | ||
| 
						 | 
					2ff3e7fb2b | ||
| 
						 | 
					b0c20d7adb | ||
| 
						 | 
					2cc5e24b38 | ||
| 
						 | 
					3afa73b449 | ||
| 
						 | 
					dcf2697a2a | ||
| 
						 | 
					6a11700a6b | ||
| 
						 | 
					9bd9b043c8 | ||
| 
						 | 
					cb602c9b1a | ||
| 
						 | 
					b54beb357a | ||
| 
						 | 
					6abc2efd96 | ||
| 
						 | 
					be51093a7e | ||
| 
						 | 
					52219c4dcc | ||
| 
						 | 
					590cae13c0 | ||
| 
						 | 
					e15429b0f5 | ||
| 
						 | 
					b5cc668a45 | ||
| 
						 | 
					a1b0ae78e0 | ||
| 
						 | 
					fcc8a809e6 | ||
| 
						 | 
					48474c0f8c | ||
| 
						 | 
					9f9c95dd09 | ||
| 
						 | 
					a74fcbc8b6 | ||
| 
						 | 
					c8b898f9c5 | ||
| 
						 | 
					81bf2688b4 | ||
| 
						 | 
					87d2c9868f | ||
| 
						 | 
					5ca407e27c | ||
| 
						 | 
					5bbc2ab482 | ||
| 
						 | 
					309e8b4c92 | ||
| 
						 | 
					eee2987c99 | ||
| 
						 | 
					061e55f8c5 | ||
| 
						 | 
					56334b7832 | ||
| 
						 | 
					a4b7e0c700 | ||
| 
						 | 
					84ad7ee0e4 | ||
| 
						 | 
					d006008539 | ||
| 
						 | 
					6bb1e4c9c0 | ||
| 
						 | 
					82bdb08884 | ||
| 
						 | 
					b709ff84c3 | 
@@ -186,6 +186,11 @@ This document provides essential context for AI models interacting with this pro
 | 
			
		||||
        └── components/[component]/ # Component-specific tests
 | 
			
		||||
        ```
 | 
			
		||||
        Run them using `script/test_build_components`. Use `-c <component>` to test specific components and `-t <target>` for specific platforms.
 | 
			
		||||
    *   **Testing All Components Together:** To verify that all components can be tested together without ID conflicts or configuration issues, use:
 | 
			
		||||
        ```bash
 | 
			
		||||
        ./script/test_component_grouping.py -e config --all
 | 
			
		||||
        ```
 | 
			
		||||
        This tests all components in a single build to catch conflicts that might not appear when testing components individually. Use `-e config` for fast configuration validation, or `-e compile` for full compilation testing.
 | 
			
		||||
*   **Debugging and Troubleshooting:**
 | 
			
		||||
    *   **Debug Tools:**
 | 
			
		||||
        - `esphome config <file>.yaml` to validate configuration.
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										100
									
								
								.github/workflows/ci.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										100
									
								
								.github/workflows/ci.yml
									
									
									
									
										vendored
									
									
								
							@@ -177,6 +177,7 @@ jobs:
 | 
			
		||||
      clang-tidy: ${{ steps.determine.outputs.clang-tidy }}
 | 
			
		||||
      python-linters: ${{ steps.determine.outputs.python-linters }}
 | 
			
		||||
      changed-components: ${{ steps.determine.outputs.changed-components }}
 | 
			
		||||
      changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }}
 | 
			
		||||
      component-test-count: ${{ steps.determine.outputs.component-test-count }}
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
@@ -204,6 +205,7 @@ jobs:
 | 
			
		||||
          echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT
 | 
			
		||||
          echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT
 | 
			
		||||
          echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT
 | 
			
		||||
          echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT
 | 
			
		||||
          echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT
 | 
			
		||||
 | 
			
		||||
  integration-tests:
 | 
			
		||||
@@ -367,12 +369,13 @@ jobs:
 | 
			
		||||
      fail-fast: false
 | 
			
		||||
      max-parallel: 2
 | 
			
		||||
      matrix:
 | 
			
		||||
        file: ${{ fromJson(needs.determine-jobs.outputs.changed-components) }}
 | 
			
		||||
        file: ${{ fromJson(needs.determine-jobs.outputs.changed-components-with-tests) }}
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Install dependencies
 | 
			
		||||
        run: |
 | 
			
		||||
          sudo apt-get update
 | 
			
		||||
          sudo apt-get install libsdl2-dev
 | 
			
		||||
      - name: Cache apt packages
 | 
			
		||||
        uses: awalsh128/cache-apt-pkgs-action@acb598e5ddbc6f68a970c5da0688d2f3a9f04d05 # v1.5.3
 | 
			
		||||
        with:
 | 
			
		||||
          packages: libsdl2-dev
 | 
			
		||||
          version: 1.0
 | 
			
		||||
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 | 
			
		||||
@@ -381,17 +384,17 @@ jobs:
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
      - name: test_build_components -e config -c ${{ matrix.file }}
 | 
			
		||||
      - name: Validate config for ${{ matrix.file }}
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          ./script/test_build_components -e config -c ${{ matrix.file }}
 | 
			
		||||
      - name: test_build_components -e compile -c ${{ matrix.file }}
 | 
			
		||||
          python3 script/test_build_components.py -e config -c ${{ matrix.file }}
 | 
			
		||||
      - name: Compile config for ${{ matrix.file }}
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          ./script/test_build_components -e compile -c ${{ matrix.file }}
 | 
			
		||||
          python3 script/test_build_components.py -e compile -c ${{ matrix.file }}
 | 
			
		||||
 | 
			
		||||
  test-build-components-splitter:
 | 
			
		||||
    name: Split components for testing into 10 components per group
 | 
			
		||||
    name: Split components for intelligent grouping (40 weighted per batch)
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
@@ -402,14 +405,26 @@ jobs:
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 | 
			
		||||
      - name: Split components into groups of 10
 | 
			
		||||
      - name: Restore Python
 | 
			
		||||
        uses: ./.github/actions/restore-python
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
      - name: Split components intelligently based on bus configurations
 | 
			
		||||
        id: split
 | 
			
		||||
        run: |
 | 
			
		||||
          components=$(echo '${{ needs.determine-jobs.outputs.changed-components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(10) | join(" ")]')
 | 
			
		||||
          echo "components=$components" >> $GITHUB_OUTPUT
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
 | 
			
		||||
          # Use intelligent splitter that groups components with same bus configs
 | 
			
		||||
          components='${{ needs.determine-jobs.outputs.changed-components-with-tests }}'
 | 
			
		||||
 | 
			
		||||
          echo "Splitting components intelligently..."
 | 
			
		||||
          output=$(python3 script/split_components_for_ci.py --components "$components" --batch-size 40 --output github)
 | 
			
		||||
 | 
			
		||||
          echo "$output" >> $GITHUB_OUTPUT
 | 
			
		||||
 | 
			
		||||
  test-build-components-split:
 | 
			
		||||
    name: Test split components
 | 
			
		||||
    name: Test components batch (${{ matrix.components }})
 | 
			
		||||
    runs-on: ubuntu-24.04
 | 
			
		||||
    needs:
 | 
			
		||||
      - common
 | 
			
		||||
@@ -418,17 +433,23 @@ jobs:
 | 
			
		||||
    if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
 | 
			
		||||
    strategy:
 | 
			
		||||
      fail-fast: false
 | 
			
		||||
      max-parallel: 4
 | 
			
		||||
      max-parallel: 5
 | 
			
		||||
      matrix:
 | 
			
		||||
        components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
 | 
			
		||||
    steps:
 | 
			
		||||
      - name: Show disk space
 | 
			
		||||
        run: |
 | 
			
		||||
          echo "Available disk space:"
 | 
			
		||||
          df -h
 | 
			
		||||
 | 
			
		||||
      - name: List components
 | 
			
		||||
        run: echo ${{ matrix.components }}
 | 
			
		||||
 | 
			
		||||
      - name: Install dependencies
 | 
			
		||||
        run: |
 | 
			
		||||
          sudo apt-get update
 | 
			
		||||
          sudo apt-get install libsdl2-dev
 | 
			
		||||
      - name: Cache apt packages
 | 
			
		||||
        uses: awalsh128/cache-apt-pkgs-action@acb598e5ddbc6f68a970c5da0688d2f3a9f04d05 # v1.5.3
 | 
			
		||||
        with:
 | 
			
		||||
          packages: libsdl2-dev
 | 
			
		||||
          version: 1.0
 | 
			
		||||
 | 
			
		||||
      - name: Check out code from GitHub
 | 
			
		||||
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 | 
			
		||||
@@ -437,20 +458,37 @@ jobs:
 | 
			
		||||
        with:
 | 
			
		||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
			
		||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
			
		||||
      - name: Validate config
 | 
			
		||||
      - name: Validate and compile components with intelligent grouping
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          for component in ${{ matrix.components }}; do
 | 
			
		||||
            ./script/test_build_components -e config -c $component
 | 
			
		||||
          done
 | 
			
		||||
      - name: Compile config
 | 
			
		||||
        run: |
 | 
			
		||||
          . venv/bin/activate
 | 
			
		||||
          mkdir build_cache
 | 
			
		||||
          export PLATFORMIO_BUILD_CACHE_DIR=$PWD/build_cache
 | 
			
		||||
          for component in ${{ matrix.components }}; do
 | 
			
		||||
            ./script/test_build_components -e compile -c $component
 | 
			
		||||
          done
 | 
			
		||||
          # Use /mnt for build files (70GB available vs ~29GB on /)
 | 
			
		||||
          # Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there)
 | 
			
		||||
          sudo mkdir -p /mnt/platformio
 | 
			
		||||
          sudo chown $USER:$USER /mnt/platformio
 | 
			
		||||
          mkdir -p ~/.platformio
 | 
			
		||||
          sudo mount --bind /mnt/platformio ~/.platformio
 | 
			
		||||
 | 
			
		||||
          # Bind mount test build directory to /mnt
 | 
			
		||||
          sudo mkdir -p /mnt/test_build_components_build
 | 
			
		||||
          sudo chown $USER:$USER /mnt/test_build_components_build
 | 
			
		||||
          mkdir -p tests/test_build_components/build
 | 
			
		||||
          sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build
 | 
			
		||||
 | 
			
		||||
          # Convert space-separated components to comma-separated for Python script
 | 
			
		||||
          components_csv=$(echo "${{ matrix.components }}" | tr ' ' ',')
 | 
			
		||||
 | 
			
		||||
          echo "Testing components: $components_csv"
 | 
			
		||||
          echo ""
 | 
			
		||||
 | 
			
		||||
          # Run config validation with grouping
 | 
			
		||||
          python3 script/test_build_components.py -e config -c "$components_csv" -f
 | 
			
		||||
 | 
			
		||||
          echo ""
 | 
			
		||||
          echo "Config validation passed! Starting compilation..."
 | 
			
		||||
          echo ""
 | 
			
		||||
 | 
			
		||||
          # Run compilation with grouping
 | 
			
		||||
          python3 script/test_build_components.py -e compile -c "$components_csv" -f
 | 
			
		||||
 | 
			
		||||
  pre-commit-ci-lite:
 | 
			
		||||
    name: pre-commit.ci lite
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										4
									
								
								.github/workflows/codeql.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/workflows/codeql.yml
									
									
									
									
										vendored
									
									
								
							@@ -58,7 +58,7 @@ jobs:
 | 
			
		||||
 | 
			
		||||
      # Initializes the CodeQL tools for scanning.
 | 
			
		||||
      - name: Initialize CodeQL
 | 
			
		||||
        uses: github/codeql-action/init@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
 | 
			
		||||
        uses: github/codeql-action/init@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
 | 
			
		||||
        with:
 | 
			
		||||
          languages: ${{ matrix.language }}
 | 
			
		||||
          build-mode: ${{ matrix.build-mode }}
 | 
			
		||||
@@ -86,6 +86,6 @@ jobs:
 | 
			
		||||
          exit 1
 | 
			
		||||
 | 
			
		||||
      - name: Perform CodeQL Analysis
 | 
			
		||||
        uses: github/codeql-action/analyze@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
 | 
			
		||||
        uses: github/codeql-action/analyze@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
 | 
			
		||||
        with:
 | 
			
		||||
          category: "/language:${{matrix.language}}"
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										2
									
								
								.github/workflows/stale.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										2
									
								
								.github/workflows/stale.yml
									
									
									
									
										vendored
									
									
								
							@@ -23,7 +23,7 @@ jobs:
 | 
			
		||||
        with:
 | 
			
		||||
          debug-only: ${{ github.ref != 'refs/heads/dev' }} # Dry-run when not run on dev branch
 | 
			
		||||
          remove-stale-when-updated: true
 | 
			
		||||
          operations-per-run: 150
 | 
			
		||||
          operations-per-run: 400
 | 
			
		||||
 | 
			
		||||
          # The 90 day stale policy for PRs
 | 
			
		||||
          # - PRs
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										2
									
								
								Doxyfile
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								Doxyfile
									
									
									
									
									
								
							@@ -48,7 +48,7 @@ PROJECT_NAME           = ESPHome
 | 
			
		||||
# could be handy for archiving the generated documentation or if some version
 | 
			
		||||
# control system is used.
 | 
			
		||||
 | 
			
		||||
PROJECT_NUMBER         = 2025.10.0b1
 | 
			
		||||
PROJECT_NUMBER         = 2025.11.0-dev
 | 
			
		||||
 | 
			
		||||
# Using the PROJECT_BRIEF tag one can provide an optional one line description
 | 
			
		||||
# for a project that appears at the top of each page and should give viewer a
 | 
			
		||||
 
 | 
			
		||||
@@ -1002,6 +1002,12 @@ def parse_args(argv):
 | 
			
		||||
        action="append",
 | 
			
		||||
        default=[],
 | 
			
		||||
    )
 | 
			
		||||
    options_parser.add_argument(
 | 
			
		||||
        "--testing-mode",
 | 
			
		||||
        help="Enable testing mode (disables validation checks for grouped component testing)",
 | 
			
		||||
        action="store_true",
 | 
			
		||||
        default=False,
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    parser = argparse.ArgumentParser(
 | 
			
		||||
        description=f"ESPHome {const.__version__}", parents=[options_parser]
 | 
			
		||||
@@ -1260,6 +1266,7 @@ def run_esphome(argv):
 | 
			
		||||
 | 
			
		||||
    args = parse_args(argv)
 | 
			
		||||
    CORE.dashboard = args.dashboard
 | 
			
		||||
    CORE.testing_mode = args.testing_mode
 | 
			
		||||
 | 
			
		||||
    # Create address cache from command-line arguments
 | 
			
		||||
    CORE.address_cache = AddressCache.from_cli_args(
 | 
			
		||||
 
 | 
			
		||||
@@ -105,9 +105,9 @@ class Canbus : public Component {
 | 
			
		||||
  CallbackManager<void(uint32_t can_id, bool extended_id, bool rtr, const std::vector<uint8_t> &data)>
 | 
			
		||||
      callback_manager_{};
 | 
			
		||||
 | 
			
		||||
  virtual bool setup_internal();
 | 
			
		||||
  virtual Error send_message(struct CanFrame *frame);
 | 
			
		||||
  virtual Error read_message(struct CanFrame *frame);
 | 
			
		||||
  virtual bool setup_internal() = 0;
 | 
			
		||||
  virtual Error send_message(struct CanFrame *frame) = 0;
 | 
			
		||||
  virtual Error read_message(struct CanFrame *frame) = 0;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
template<typename... Ts> class CanbusSendAction : public Action<Ts...>, public Parented<Canbus> {
 | 
			
		||||
 
 | 
			
		||||
@@ -5,7 +5,7 @@ namespace dashboard_import {
 | 
			
		||||
 | 
			
		||||
static std::string g_package_import_url;  // NOLINT
 | 
			
		||||
 | 
			
		||||
std::string get_package_import_url() { return g_package_import_url; }
 | 
			
		||||
const std::string &get_package_import_url() { return g_package_import_url; }
 | 
			
		||||
void set_package_import_url(std::string url) { g_package_import_url = std::move(url); }
 | 
			
		||||
 | 
			
		||||
}  // namespace dashboard_import
 | 
			
		||||
 
 | 
			
		||||
@@ -5,7 +5,7 @@
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace dashboard_import {
 | 
			
		||||
 | 
			
		||||
std::string get_package_import_url();
 | 
			
		||||
const std::string &get_package_import_url();
 | 
			
		||||
void set_package_import_url(std::string url);
 | 
			
		||||
 | 
			
		||||
}  // namespace dashboard_import
 | 
			
		||||
 
 | 
			
		||||
@@ -304,6 +304,17 @@ def _format_framework_espidf_version(ver: cv.Version, release: str) -> str:
 | 
			
		||||
    return f"pioarduino/framework-espidf@https://github.com/pioarduino/esp-idf/releases/download/v{str(ver)}/esp-idf-v{str(ver)}.zip"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _is_framework_url(source: str) -> str:
 | 
			
		||||
    # platformio accepts many URL schemes for framework repositories and archives including http, https, git, file, and symlink
 | 
			
		||||
    import urllib.parse
 | 
			
		||||
 | 
			
		||||
    try:
 | 
			
		||||
        parsed = urllib.parse.urlparse(source)
 | 
			
		||||
    except ValueError:
 | 
			
		||||
        return False
 | 
			
		||||
    return bool(parsed.scheme)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# NOTE: Keep this in mind when updating the recommended version:
 | 
			
		||||
#  * New framework historically have had some regressions, especially for WiFi.
 | 
			
		||||
#    The new version needs to be thoroughly validated before changing the
 | 
			
		||||
@@ -314,11 +325,12 @@ def _format_framework_espidf_version(ver: cv.Version, release: str) -> str:
 | 
			
		||||
#  - https://github.com/espressif/arduino-esp32/releases
 | 
			
		||||
ARDUINO_FRAMEWORK_VERSION_LOOKUP = {
 | 
			
		||||
    "recommended": cv.Version(3, 2, 1),
 | 
			
		||||
    "latest": cv.Version(3, 3, 1),
 | 
			
		||||
    "dev": cv.Version(3, 3, 1),
 | 
			
		||||
    "latest": cv.Version(3, 3, 2),
 | 
			
		||||
    "dev": cv.Version(3, 3, 2),
 | 
			
		||||
}
 | 
			
		||||
ARDUINO_PLATFORM_VERSION_LOOKUP = {
 | 
			
		||||
    cv.Version(3, 3, 1): cv.Version(55, 3, 31),
 | 
			
		||||
    cv.Version(3, 3, 2): cv.Version(55, 3, 31, "1"),
 | 
			
		||||
    cv.Version(3, 3, 1): cv.Version(55, 3, 31, "1"),
 | 
			
		||||
    cv.Version(3, 3, 0): cv.Version(55, 3, 30, "2"),
 | 
			
		||||
    cv.Version(3, 2, 1): cv.Version(54, 3, 21, "2"),
 | 
			
		||||
    cv.Version(3, 2, 0): cv.Version(54, 3, 20),
 | 
			
		||||
@@ -336,8 +348,8 @@ ESP_IDF_FRAMEWORK_VERSION_LOOKUP = {
 | 
			
		||||
    "dev": cv.Version(5, 5, 1),
 | 
			
		||||
}
 | 
			
		||||
ESP_IDF_PLATFORM_VERSION_LOOKUP = {
 | 
			
		||||
    cv.Version(5, 5, 1): cv.Version(55, 3, 31),
 | 
			
		||||
    cv.Version(5, 5, 0): cv.Version(55, 3, 31),
 | 
			
		||||
    cv.Version(5, 5, 1): cv.Version(55, 3, 31, "1"),
 | 
			
		||||
    cv.Version(5, 5, 0): cv.Version(55, 3, 31, "1"),
 | 
			
		||||
    cv.Version(5, 4, 2): cv.Version(54, 3, 21, "2"),
 | 
			
		||||
    cv.Version(5, 4, 1): cv.Version(54, 3, 21, "2"),
 | 
			
		||||
    cv.Version(5, 4, 0): cv.Version(54, 3, 21, "2"),
 | 
			
		||||
@@ -352,8 +364,8 @@ ESP_IDF_PLATFORM_VERSION_LOOKUP = {
 | 
			
		||||
#  - https://github.com/pioarduino/platform-espressif32/releases
 | 
			
		||||
PLATFORM_VERSION_LOOKUP = {
 | 
			
		||||
    "recommended": cv.Version(54, 3, 21, "2"),
 | 
			
		||||
    "latest": cv.Version(55, 3, 31),
 | 
			
		||||
    "dev": "https://github.com/pioarduino/platform-espressif32.git#develop",
 | 
			
		||||
    "latest": cv.Version(55, 3, 31, "1"),
 | 
			
		||||
    "dev": cv.Version(55, 3, 31, "1"),
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@@ -386,7 +398,7 @@ def _check_versions(value):
 | 
			
		||||
        value[CONF_SOURCE] = value.get(
 | 
			
		||||
            CONF_SOURCE, _format_framework_arduino_version(version)
 | 
			
		||||
        )
 | 
			
		||||
        if value[CONF_SOURCE].startswith("http"):
 | 
			
		||||
        if _is_framework_url(value[CONF_SOURCE]):
 | 
			
		||||
            value[CONF_SOURCE] = (
 | 
			
		||||
                f"pioarduino/framework-arduinoespressif32@{value[CONF_SOURCE]}"
 | 
			
		||||
            )
 | 
			
		||||
@@ -399,7 +411,7 @@ def _check_versions(value):
 | 
			
		||||
            CONF_SOURCE,
 | 
			
		||||
            _format_framework_espidf_version(version, value.get(CONF_RELEASE, None)),
 | 
			
		||||
        )
 | 
			
		||||
        if value[CONF_SOURCE].startswith("http"):
 | 
			
		||||
        if _is_framework_url(value[CONF_SOURCE]):
 | 
			
		||||
            value[CONF_SOURCE] = f"pioarduino/framework-espidf@{value[CONF_SOURCE]}"
 | 
			
		||||
 | 
			
		||||
    if CONF_PLATFORM_VERSION not in value:
 | 
			
		||||
@@ -645,6 +657,7 @@ def _show_framework_migration_message(name: str, variant: str) -> None:
 | 
			
		||||
        + "Why change? ESP-IDF offers:\n"
 | 
			
		||||
        + color(AnsiFore.GREEN, "  ✨ Up to 40% smaller binaries\n")
 | 
			
		||||
        + color(AnsiFore.GREEN, "  🚀 Better performance and optimization\n")
 | 
			
		||||
        + color(AnsiFore.GREEN, "  ⚡ 2-3x faster compile times\n")
 | 
			
		||||
        + color(AnsiFore.GREEN, "  📦 Custom-built firmware for your exact needs\n")
 | 
			
		||||
        + color(
 | 
			
		||||
            AnsiFore.GREEN,
 | 
			
		||||
@@ -652,7 +665,6 @@ def _show_framework_migration_message(name: str, variant: str) -> None:
 | 
			
		||||
        )
 | 
			
		||||
        + "\n"
 | 
			
		||||
        + "Trade-offs:\n"
 | 
			
		||||
        + color(AnsiFore.YELLOW, "  ⏱️  Compile times are ~25% longer\n")
 | 
			
		||||
        + color(AnsiFore.YELLOW, "  🔄 Some components need migration\n")
 | 
			
		||||
        + "\n"
 | 
			
		||||
        + "What should I do?\n"
 | 
			
		||||
 
 | 
			
		||||
@@ -285,6 +285,10 @@ def consume_connection_slots(
 | 
			
		||||
 | 
			
		||||
def validate_connection_slots(max_connections: int) -> None:
 | 
			
		||||
    """Validate that BLE connection slots don't exceed the configured maximum."""
 | 
			
		||||
    # Skip validation in testing mode to allow component grouping
 | 
			
		||||
    if CORE.testing_mode:
 | 
			
		||||
        return
 | 
			
		||||
 | 
			
		||||
    ble_data = CORE.data.get(KEY_ESP32_BLE, {})
 | 
			
		||||
    used_slots = ble_data.get(KEY_USED_CONNECTION_SLOTS, [])
 | 
			
		||||
    num_used = len(used_slots)
 | 
			
		||||
@@ -332,12 +336,16 @@ def final_validation(config):
 | 
			
		||||
 | 
			
		||||
    # Check if BLE Server is needed
 | 
			
		||||
    has_ble_server = "esp32_ble_server" in full_config
 | 
			
		||||
    add_idf_sdkconfig_option("CONFIG_BT_GATTS_ENABLE", has_ble_server)
 | 
			
		||||
 | 
			
		||||
    # Check if BLE Client is needed (via esp32_ble_tracker or esp32_ble_client)
 | 
			
		||||
    has_ble_client = (
 | 
			
		||||
        "esp32_ble_tracker" in full_config or "esp32_ble_client" in full_config
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    # ESP-IDF BLE stack requires GATT Server to be enabled when GATT Client is enabled
 | 
			
		||||
    # This is an internal dependency in the Bluedroid stack (tested ESP-IDF 5.4.2-5.5.1)
 | 
			
		||||
    # See: https://github.com/espressif/esp-idf/issues/17724
 | 
			
		||||
    add_idf_sdkconfig_option("CONFIG_BT_GATTS_ENABLE", has_ble_server or has_ble_client)
 | 
			
		||||
    add_idf_sdkconfig_option("CONFIG_BT_GATTC_ENABLE", has_ble_client)
 | 
			
		||||
 | 
			
		||||
    # Handle max_connections: check for deprecated location in esp32_ble_tracker
 | 
			
		||||
 
 | 
			
		||||
@@ -14,10 +14,6 @@
 | 
			
		||||
#include "esphome/core/hal.h"
 | 
			
		||||
#include "esphome/core/helpers.h"
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ARDUINO
 | 
			
		||||
#include <esp32-hal-bt.h>
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
namespace esphome {
 | 
			
		||||
namespace esp32_ble_beacon {
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +1,6 @@
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
from dataclasses import dataclass
 | 
			
		||||
import logging
 | 
			
		||||
 | 
			
		||||
from esphome import automation
 | 
			
		||||
@@ -52,9 +53,19 @@ class BLEFeatures(StrEnum):
 | 
			
		||||
    ESP_BT_DEVICE = "ESP_BT_DEVICE"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Dataclass for registration counts
 | 
			
		||||
@dataclass
 | 
			
		||||
class RegistrationCounts:
 | 
			
		||||
    listeners: int = 0
 | 
			
		||||
    clients: int = 0
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Set to track which features are needed by components
 | 
			
		||||
_required_features: set[BLEFeatures] = set()
 | 
			
		||||
 | 
			
		||||
# Track registration counts for StaticVector sizing
 | 
			
		||||
_registration_counts = RegistrationCounts()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def register_ble_features(features: set[BLEFeatures]) -> None:
 | 
			
		||||
    """Register BLE features that a component needs.
 | 
			
		||||
@@ -257,12 +268,14 @@ async def to_code(config):
 | 
			
		||||
        register_ble_features({BLEFeatures.ESP_BT_DEVICE})
 | 
			
		||||
 | 
			
		||||
    for conf in config.get(CONF_ON_BLE_ADVERTISE, []):
 | 
			
		||||
        _registration_counts.listeners += 1
 | 
			
		||||
        trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
 | 
			
		||||
        if CONF_MAC_ADDRESS in conf:
 | 
			
		||||
            addr_list = [it.as_hex for it in conf[CONF_MAC_ADDRESS]]
 | 
			
		||||
            cg.add(trigger.set_addresses(addr_list))
 | 
			
		||||
        await automation.build_automation(trigger, [(ESPBTDeviceConstRef, "x")], conf)
 | 
			
		||||
    for conf in config.get(CONF_ON_BLE_SERVICE_DATA_ADVERTISE, []):
 | 
			
		||||
        _registration_counts.listeners += 1
 | 
			
		||||
        trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
 | 
			
		||||
        if len(conf[CONF_SERVICE_UUID]) == len(bt_uuid16_format):
 | 
			
		||||
            cg.add(trigger.set_service_uuid16(as_hex(conf[CONF_SERVICE_UUID])))
 | 
			
		||||
@@ -275,6 +288,7 @@ async def to_code(config):
 | 
			
		||||
            cg.add(trigger.set_address(conf[CONF_MAC_ADDRESS].as_hex))
 | 
			
		||||
        await automation.build_automation(trigger, [(adv_data_t_const_ref, "x")], conf)
 | 
			
		||||
    for conf in config.get(CONF_ON_BLE_MANUFACTURER_DATA_ADVERTISE, []):
 | 
			
		||||
        _registration_counts.listeners += 1
 | 
			
		||||
        trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
 | 
			
		||||
        if len(conf[CONF_MANUFACTURER_ID]) == len(bt_uuid16_format):
 | 
			
		||||
            cg.add(trigger.set_manufacturer_uuid16(as_hex(conf[CONF_MANUFACTURER_ID])))
 | 
			
		||||
@@ -287,6 +301,7 @@ async def to_code(config):
 | 
			
		||||
            cg.add(trigger.set_address(conf[CONF_MAC_ADDRESS].as_hex))
 | 
			
		||||
        await automation.build_automation(trigger, [(adv_data_t_const_ref, "x")], conf)
 | 
			
		||||
    for conf in config.get(CONF_ON_SCAN_END, []):
 | 
			
		||||
        _registration_counts.listeners += 1
 | 
			
		||||
        trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
 | 
			
		||||
        await automation.build_automation(trigger, [], conf)
 | 
			
		||||
 | 
			
		||||
@@ -320,6 +335,17 @@ async def _add_ble_features():
 | 
			
		||||
        cg.add_define("USE_ESP32_BLE_DEVICE")
 | 
			
		||||
        cg.add_define("USE_ESP32_BLE_UUID")
 | 
			
		||||
 | 
			
		||||
    # Add defines for StaticVector sizing based on registration counts
 | 
			
		||||
    # Only define if count > 0 to avoid allocating unnecessary memory
 | 
			
		||||
    if _registration_counts.listeners > 0:
 | 
			
		||||
        cg.add_define(
 | 
			
		||||
            "ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT", _registration_counts.listeners
 | 
			
		||||
        )
 | 
			
		||||
    if _registration_counts.clients > 0:
 | 
			
		||||
        cg.add_define(
 | 
			
		||||
            "ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT", _registration_counts.clients
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
ESP32_BLE_START_SCAN_ACTION_SCHEMA = cv.Schema(
 | 
			
		||||
    {
 | 
			
		||||
@@ -369,6 +395,7 @@ async def register_ble_device(
 | 
			
		||||
    var: cg.SafeExpType, config: ConfigType
 | 
			
		||||
) -> cg.SafeExpType:
 | 
			
		||||
    register_ble_features({BLEFeatures.ESP_BT_DEVICE})
 | 
			
		||||
    _registration_counts.listeners += 1
 | 
			
		||||
    paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
 | 
			
		||||
    cg.add(paren.register_listener(var))
 | 
			
		||||
    return var
 | 
			
		||||
@@ -376,6 +403,7 @@ async def register_ble_device(
 | 
			
		||||
 | 
			
		||||
async def register_client(var: cg.SafeExpType, config: ConfigType) -> cg.SafeExpType:
 | 
			
		||||
    register_ble_features({BLEFeatures.ESP_BT_DEVICE})
 | 
			
		||||
    _registration_counts.clients += 1
 | 
			
		||||
    paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
 | 
			
		||||
    cg.add(paren.register_client(var))
 | 
			
		||||
    return var
 | 
			
		||||
@@ -389,6 +417,7 @@ async def register_raw_ble_device(
 | 
			
		||||
    This does NOT register the ESP_BT_DEVICE feature, meaning ESPBTDevice
 | 
			
		||||
    will not be compiled in if this is the only registration method used.
 | 
			
		||||
    """
 | 
			
		||||
    _registration_counts.listeners += 1
 | 
			
		||||
    paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
 | 
			
		||||
    cg.add(paren.register_listener(var))
 | 
			
		||||
    return var
 | 
			
		||||
@@ -402,6 +431,7 @@ async def register_raw_client(
 | 
			
		||||
    This does NOT register the ESP_BT_DEVICE feature, meaning ESPBTDevice
 | 
			
		||||
    will not be compiled in if this is the only registration method used.
 | 
			
		||||
    """
 | 
			
		||||
    _registration_counts.clients += 1
 | 
			
		||||
    paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
 | 
			
		||||
    cg.add(paren.register_client(var))
 | 
			
		||||
    return var
 | 
			
		||||
 
 | 
			
		||||
@@ -25,10 +25,6 @@
 | 
			
		||||
#include <esp_coexist.h>
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ARDUINO
 | 
			
		||||
#include <esp32-hal-bt.h>
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
#define MBEDTLS_AES_ALT
 | 
			
		||||
#include <aes_alt.h>
 | 
			
		||||
 | 
			
		||||
@@ -78,9 +74,11 @@ void ESP32BLETracker::setup() {
 | 
			
		||||
      [this](ota::OTAState state, float progress, uint8_t error, ota::OTAComponent *comp) {
 | 
			
		||||
        if (state == ota::OTA_STARTED) {
 | 
			
		||||
          this->stop_scan();
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
 | 
			
		||||
          for (auto *client : this->clients_) {
 | 
			
		||||
            client->disconnect();
 | 
			
		||||
          }
 | 
			
		||||
#endif
 | 
			
		||||
        }
 | 
			
		||||
      });
 | 
			
		||||
#endif
 | 
			
		||||
@@ -210,8 +208,10 @@ void ESP32BLETracker::start_scan_(bool first) {
 | 
			
		||||
  this->set_scanner_state_(ScannerState::STARTING);
 | 
			
		||||
  ESP_LOGD(TAG, "Starting scan, set scanner state to STARTING.");
 | 
			
		||||
  if (!first) {
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
 | 
			
		||||
    for (auto *listener : this->listeners_)
 | 
			
		||||
      listener->on_scan_end();
 | 
			
		||||
#endif
 | 
			
		||||
  }
 | 
			
		||||
#ifdef USE_ESP32_BLE_DEVICE
 | 
			
		||||
  this->already_discovered_.clear();
 | 
			
		||||
@@ -240,20 +240,25 @@ void ESP32BLETracker::start_scan_(bool first) {
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void ESP32BLETracker::register_client(ESPBTClient *client) {
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
 | 
			
		||||
  client->app_id = ++this->app_id_;
 | 
			
		||||
  this->clients_.push_back(client);
 | 
			
		||||
  this->recalculate_advertisement_parser_types();
 | 
			
		||||
#endif
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void ESP32BLETracker::register_listener(ESPBTDeviceListener *listener) {
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
 | 
			
		||||
  listener->set_parent(this);
 | 
			
		||||
  this->listeners_.push_back(listener);
 | 
			
		||||
  this->recalculate_advertisement_parser_types();
 | 
			
		||||
#endif
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void ESP32BLETracker::recalculate_advertisement_parser_types() {
 | 
			
		||||
  this->raw_advertisements_ = false;
 | 
			
		||||
  this->parse_advertisements_ = false;
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
 | 
			
		||||
  for (auto *listener : this->listeners_) {
 | 
			
		||||
    if (listener->get_advertisement_parser_type() == AdvertisementParserType::PARSED_ADVERTISEMENTS) {
 | 
			
		||||
      this->parse_advertisements_ = true;
 | 
			
		||||
@@ -261,6 +266,8 @@ void ESP32BLETracker::recalculate_advertisement_parser_types() {
 | 
			
		||||
      this->raw_advertisements_ = true;
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
#endif
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
 | 
			
		||||
  for (auto *client : this->clients_) {
 | 
			
		||||
    if (client->get_advertisement_parser_type() == AdvertisementParserType::PARSED_ADVERTISEMENTS) {
 | 
			
		||||
      this->parse_advertisements_ = true;
 | 
			
		||||
@@ -268,6 +275,7 @@ void ESP32BLETracker::recalculate_advertisement_parser_types() {
 | 
			
		||||
      this->raw_advertisements_ = true;
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
#endif
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void ESP32BLETracker::gap_event_handler(esp_gap_ble_cb_event_t event, esp_ble_gap_cb_param_t *param) {
 | 
			
		||||
@@ -287,9 +295,11 @@ void ESP32BLETracker::gap_event_handler(esp_gap_ble_cb_event_t event, esp_ble_ga
 | 
			
		||||
      break;
 | 
			
		||||
  }
 | 
			
		||||
    // Forward all events to clients (scan results are handled separately via gap_scan_event_handler)
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
 | 
			
		||||
  for (auto *client : this->clients_) {
 | 
			
		||||
    client->gap_event_handler(event, param);
 | 
			
		||||
  }
 | 
			
		||||
#endif
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void ESP32BLETracker::gap_scan_event_handler(const BLEScanResult &scan_result) {
 | 
			
		||||
@@ -352,9 +362,11 @@ void ESP32BLETracker::gap_scan_stop_complete_(const esp_ble_gap_cb_param_t::ble_
 | 
			
		||||
 | 
			
		||||
void ESP32BLETracker::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_t gattc_if,
 | 
			
		||||
                                          esp_ble_gattc_cb_param_t *param) {
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
 | 
			
		||||
  for (auto *client : this->clients_) {
 | 
			
		||||
    client->gattc_event_handler(event, gattc_if, param);
 | 
			
		||||
  }
 | 
			
		||||
#endif
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
void ESP32BLETracker::set_scanner_state_(ScannerState state) {
 | 
			
		||||
@@ -708,12 +720,16 @@ bool ESPBTDevice::resolve_irk(const uint8_t *irk) const {
 | 
			
		||||
void ESP32BLETracker::process_scan_result_(const BLEScanResult &scan_result) {
 | 
			
		||||
  // Process raw advertisements
 | 
			
		||||
  if (this->raw_advertisements_) {
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
 | 
			
		||||
    for (auto *listener : this->listeners_) {
 | 
			
		||||
      listener->parse_devices(&scan_result, 1);
 | 
			
		||||
    }
 | 
			
		||||
#endif
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
 | 
			
		||||
    for (auto *client : this->clients_) {
 | 
			
		||||
      client->parse_devices(&scan_result, 1);
 | 
			
		||||
    }
 | 
			
		||||
#endif
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // Process parsed advertisements
 | 
			
		||||
@@ -723,16 +739,20 @@ void ESP32BLETracker::process_scan_result_(const BLEScanResult &scan_result) {
 | 
			
		||||
    device.parse_scan_rst(scan_result);
 | 
			
		||||
 | 
			
		||||
    bool found = false;
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
 | 
			
		||||
    for (auto *listener : this->listeners_) {
 | 
			
		||||
      if (listener->parse_device(device))
 | 
			
		||||
        found = true;
 | 
			
		||||
    }
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
 | 
			
		||||
    for (auto *client : this->clients_) {
 | 
			
		||||
      if (client->parse_device(device)) {
 | 
			
		||||
        found = true;
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
    if (!found && !this->scan_continuous_) {
 | 
			
		||||
      this->print_bt_device_info(device);
 | 
			
		||||
@@ -749,8 +769,10 @@ void ESP32BLETracker::cleanup_scan_state_(bool is_stop_complete) {
 | 
			
		||||
  // Reset timeout state machine instead of cancelling scheduler timeout
 | 
			
		||||
  this->scan_timeout_state_ = ScanTimeoutState::INACTIVE;
 | 
			
		||||
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
 | 
			
		||||
  for (auto *listener : this->listeners_)
 | 
			
		||||
    listener->on_scan_end();
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
  this->set_scanner_state_(ScannerState::IDLE);
 | 
			
		||||
}
 | 
			
		||||
@@ -774,6 +796,7 @@ void ESP32BLETracker::handle_scanner_failure_() {
 | 
			
		||||
 | 
			
		||||
void ESP32BLETracker::try_promote_discovered_clients_() {
 | 
			
		||||
  // Only promote the first discovered client to avoid multiple simultaneous connections
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
 | 
			
		||||
  for (auto *client : this->clients_) {
 | 
			
		||||
    if (client->state() != ClientState::DISCOVERED) {
 | 
			
		||||
      continue;
 | 
			
		||||
@@ -795,6 +818,7 @@ void ESP32BLETracker::try_promote_discovered_clients_() {
 | 
			
		||||
    client->connect();
 | 
			
		||||
    break;
 | 
			
		||||
  }
 | 
			
		||||
#endif
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
const char *ESP32BLETracker::scanner_state_to_string_(ScannerState state) const {
 | 
			
		||||
 
 | 
			
		||||
@@ -302,6 +302,7 @@ class ESP32BLETracker : public Component,
 | 
			
		||||
  /// Count clients in each state
 | 
			
		||||
  ClientStateCounts count_client_states_() const {
 | 
			
		||||
    ClientStateCounts counts;
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
 | 
			
		||||
    for (auto *client : this->clients_) {
 | 
			
		||||
      switch (client->state()) {
 | 
			
		||||
        case ClientState::DISCONNECTING:
 | 
			
		||||
@@ -317,12 +318,17 @@ class ESP32BLETracker : public Component,
 | 
			
		||||
          break;
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
#endif
 | 
			
		||||
    return counts;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // Group 1: Large objects (12+ bytes) - vectors and callback manager
 | 
			
		||||
  std::vector<ESPBTDeviceListener *> listeners_;
 | 
			
		||||
  std::vector<ESPBTClient *> clients_;
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
 | 
			
		||||
  StaticVector<ESPBTDeviceListener *, ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT> listeners_;
 | 
			
		||||
#endif
 | 
			
		||||
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
 | 
			
		||||
  StaticVector<ESPBTClient *, ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT> clients_;
 | 
			
		||||
#endif
 | 
			
		||||
  CallbackManager<void(ScannerState)> scanner_state_callbacks_;
 | 
			
		||||
#ifdef USE_ESP32_BLE_DEVICE
 | 
			
		||||
  /// Vector of addresses that have already been printed in print_bt_device_info
 | 
			
		||||
 
 | 
			
		||||
@@ -8,6 +8,13 @@ namespace json {
 | 
			
		||||
 | 
			
		||||
static const char *const TAG = "json";
 | 
			
		||||
 | 
			
		||||
#ifdef USE_PSRAM
 | 
			
		||||
// Global allocator that outlives all JsonDocuments returned by parse_json()
 | 
			
		||||
// This prevents dangling pointer issues when JsonDocuments are returned from functions
 | 
			
		||||
// NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables) - Must be mutable for ArduinoJson::Allocator
 | 
			
		||||
static SpiRamAllocator global_json_allocator;
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
std::string build_json(const json_build_t &f) {
 | 
			
		||||
  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
 | 
			
		||||
  JsonBuilder builder;
 | 
			
		||||
@@ -33,8 +40,7 @@ JsonDocument parse_json(const uint8_t *data, size_t len) {
 | 
			
		||||
    return JsonObject();  // return unbound object
 | 
			
		||||
  }
 | 
			
		||||
#ifdef USE_PSRAM
 | 
			
		||||
  auto doc_allocator = SpiRamAllocator();
 | 
			
		||||
  JsonDocument json_document(&doc_allocator);
 | 
			
		||||
  JsonDocument json_document(&global_json_allocator);
 | 
			
		||||
#else
 | 
			
		||||
  JsonDocument json_document;
 | 
			
		||||
#endif
 | 
			
		||||
 
 | 
			
		||||
@@ -21,11 +21,11 @@ template<uint8_t N> class MCP23XXXBase : public Component, public gpio_expander:
 | 
			
		||||
 | 
			
		||||
 protected:
 | 
			
		||||
  // read a given register
 | 
			
		||||
  virtual bool read_reg(uint8_t reg, uint8_t *value);
 | 
			
		||||
  virtual bool read_reg(uint8_t reg, uint8_t *value) = 0;
 | 
			
		||||
  // write a value to a given register
 | 
			
		||||
  virtual bool write_reg(uint8_t reg, uint8_t value);
 | 
			
		||||
  virtual bool write_reg(uint8_t reg, uint8_t value) = 0;
 | 
			
		||||
  // update registers with given pin value.
 | 
			
		||||
  virtual void update_reg(uint8_t pin, bool pin_value, uint8_t reg_a);
 | 
			
		||||
  virtual void update_reg(uint8_t pin, bool pin_value, uint8_t reg_a) = 0;
 | 
			
		||||
 | 
			
		||||
  bool open_drain_ints_;
 | 
			
		||||
};
 | 
			
		||||
 
 | 
			
		||||
@@ -11,7 +11,7 @@ from esphome.const import (
 | 
			
		||||
    CONF_SERVICES,
 | 
			
		||||
    PlatformFramework,
 | 
			
		||||
)
 | 
			
		||||
from esphome.core import CORE, coroutine_with_priority
 | 
			
		||||
from esphome.core import CORE, Lambda, coroutine_with_priority
 | 
			
		||||
from esphome.coroutine import CoroPriority
 | 
			
		||||
 | 
			
		||||
CODEOWNERS = ["@esphome/core"]
 | 
			
		||||
@@ -58,17 +58,64 @@ CONFIG_SCHEMA = cv.All(
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def mdns_txt_record(key: str, value: str):
 | 
			
		||||
    return cg.StructInitializer(
 | 
			
		||||
        MDNSTXTRecord,
 | 
			
		||||
        ("key", cg.RawExpression(f"MDNS_STR({cg.safe_exp(key)})")),
 | 
			
		||||
        ("value", value),
 | 
			
		||||
def mdns_txt_record(key: str, value: str) -> cg.RawExpression:
 | 
			
		||||
    """Create a mDNS TXT record.
 | 
			
		||||
 | 
			
		||||
    Public API for external components. Do not remove.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        key: The TXT record key
 | 
			
		||||
        value: The TXT record value (static string only)
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        A RawExpression representing a MDNSTXTRecord struct
 | 
			
		||||
    """
 | 
			
		||||
    return cg.RawExpression(
 | 
			
		||||
        f"{{MDNS_STR({cg.safe_exp(key)}), MDNS_STR({cg.safe_exp(value)})}}"
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
async def _mdns_txt_record_templated(
 | 
			
		||||
    mdns_comp: cg.Pvariable, key: str, value: Lambda | str
 | 
			
		||||
) -> cg.RawExpression:
 | 
			
		||||
    """Create a mDNS TXT record with support for templated values.
 | 
			
		||||
 | 
			
		||||
    Internal helper function.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        mdns_comp: The MDNSComponent instance (from cg.get_variable())
 | 
			
		||||
        key: The TXT record key
 | 
			
		||||
        value: The TXT record value (can be a static string or a lambda template)
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        A RawExpression representing a MDNSTXTRecord struct
 | 
			
		||||
    """
 | 
			
		||||
    if not cg.is_template(value):
 | 
			
		||||
        # It's a static string - use directly in flash, no need to store in vector
 | 
			
		||||
        return mdns_txt_record(key, value)
 | 
			
		||||
    # It's a lambda - evaluate and store using helper
 | 
			
		||||
    templated_value = await cg.templatable(value, [], cg.std_string)
 | 
			
		||||
    safe_key = cg.safe_exp(key)
 | 
			
		||||
    dynamic_call = f"{mdns_comp}->add_dynamic_txt_value(({templated_value})())"
 | 
			
		||||
    return cg.RawExpression(f"{{MDNS_STR({safe_key}), MDNS_STR({dynamic_call})}}")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def mdns_service(
 | 
			
		||||
    service: str, proto: str, port: int, txt_records: list[dict[str, str]]
 | 
			
		||||
):
 | 
			
		||||
    service: str, proto: str, port: int, txt_records: list[cg.RawExpression]
 | 
			
		||||
) -> cg.StructInitializer:
 | 
			
		||||
    """Create a mDNS service.
 | 
			
		||||
 | 
			
		||||
    Public API for external components. Do not remove.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        service: Service name (e.g., "_http")
 | 
			
		||||
        proto: Protocol (e.g., "_tcp" or "_udp")
 | 
			
		||||
        port: Port number
 | 
			
		||||
        txt_records: List of MDNSTXTRecord expressions
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        A StructInitializer representing a MDNSService struct
 | 
			
		||||
    """
 | 
			
		||||
    return cg.StructInitializer(
 | 
			
		||||
        MDNSService,
 | 
			
		||||
        ("service_type", cg.RawExpression(f"MDNS_STR({cg.safe_exp(service)})")),
 | 
			
		||||
@@ -107,23 +154,37 @@ async def to_code(config):
 | 
			
		||||
    # Ensure at least 1 service (fallback service)
 | 
			
		||||
    cg.add_define("MDNS_SERVICE_COUNT", max(1, service_count))
 | 
			
		||||
 | 
			
		||||
    # Calculate compile-time dynamic TXT value count
 | 
			
		||||
    # Dynamic values are those that cannot be stored in flash at compile time
 | 
			
		||||
    dynamic_txt_count = 0
 | 
			
		||||
    if "api" in CORE.config:
 | 
			
		||||
        # Always: get_mac_address()
 | 
			
		||||
        dynamic_txt_count += 1
 | 
			
		||||
    # User-provided templatable TXT values (only lambdas, not static strings)
 | 
			
		||||
    dynamic_txt_count += sum(
 | 
			
		||||
        1
 | 
			
		||||
        for service in config[CONF_SERVICES]
 | 
			
		||||
        for txt_value in service[CONF_TXT].values()
 | 
			
		||||
        if cg.is_template(txt_value)
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    # Ensure at least 1 to avoid zero-size array
 | 
			
		||||
    cg.add_define("MDNS_DYNAMIC_TXT_COUNT", max(1, dynamic_txt_count))
 | 
			
		||||
 | 
			
		||||
    var = cg.new_Pvariable(config[CONF_ID])
 | 
			
		||||
    await cg.register_component(var, config)
 | 
			
		||||
 | 
			
		||||
    for service in config[CONF_SERVICES]:
 | 
			
		||||
        txt = [
 | 
			
		||||
            cg.StructInitializer(
 | 
			
		||||
                MDNSTXTRecord,
 | 
			
		||||
                ("key", cg.RawExpression(f"MDNS_STR({cg.safe_exp(txt_key)})")),
 | 
			
		||||
                ("value", await cg.templatable(txt_value, [], cg.std_string)),
 | 
			
		||||
            )
 | 
			
		||||
        txt_records = [
 | 
			
		||||
            await _mdns_txt_record_templated(var, txt_key, txt_value)
 | 
			
		||||
            for txt_key, txt_value in service[CONF_TXT].items()
 | 
			
		||||
        ]
 | 
			
		||||
 | 
			
		||||
        exp = mdns_service(
 | 
			
		||||
            service[CONF_SERVICE],
 | 
			
		||||
            service[CONF_PROTOCOL],
 | 
			
		||||
            await cg.templatable(service[CONF_PORT], [], cg.uint16),
 | 
			
		||||
            txt,
 | 
			
		||||
            txt_records,
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
        cg.add(var.add_extra_service(exp))
 | 
			
		||||
 
 | 
			
		||||
@@ -9,21 +9,9 @@
 | 
			
		||||
#include <pgmspace.h>
 | 
			
		||||
// Macro to define strings in PROGMEM on ESP8266, regular memory on other platforms
 | 
			
		||||
#define MDNS_STATIC_CONST_CHAR(name, value) static const char name[] PROGMEM = value
 | 
			
		||||
// Helper to convert PROGMEM string to std::string for TemplatableValue
 | 
			
		||||
// Only define this function if we have services that will use it
 | 
			
		||||
#if defined(USE_API) || defined(USE_PROMETHEUS) || defined(USE_WEBSERVER) || defined(USE_MDNS_EXTRA_SERVICES)
 | 
			
		||||
static std::string mdns_str_value(PGM_P str) {
 | 
			
		||||
  char buf[64];
 | 
			
		||||
  strncpy_P(buf, str, sizeof(buf) - 1);
 | 
			
		||||
  buf[sizeof(buf) - 1] = '\0';
 | 
			
		||||
  return std::string(buf);
 | 
			
		||||
}
 | 
			
		||||
#define MDNS_STR_VALUE(name) mdns_str_value(name)
 | 
			
		||||
#endif
 | 
			
		||||
#else
 | 
			
		||||
// On non-ESP8266 platforms, use regular const char*
 | 
			
		||||
#define MDNS_STATIC_CONST_CHAR(name, value) static constexpr const char name[] = value
 | 
			
		||||
#define MDNS_STR_VALUE(name) std::string(name)
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
#ifdef USE_API
 | 
			
		||||
@@ -43,30 +31,10 @@ static const char *const TAG = "mdns";
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
// Define all constant strings using the macro
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(SERVICE_ESPHOMELIB, "_esphomelib");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(SERVICE_TCP, "_tcp");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(SERVICE_PROMETHEUS, "_prometheus-http");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(SERVICE_HTTP, "_http");
 | 
			
		||||
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(TXT_FRIENDLY_NAME, "friendly_name");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(TXT_VERSION, "version");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(TXT_MAC, "mac");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(TXT_PLATFORM, "platform");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(TXT_BOARD, "board");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(TXT_NETWORK, "network");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION, "api_encryption");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION_SUPPORTED, "api_encryption_supported");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(TXT_PROJECT_NAME, "project_name");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(TXT_PROJECT_VERSION, "project_version");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(TXT_PACKAGE_IMPORT_URL, "package_import_url");
 | 
			
		||||
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(PLATFORM_ESP8266, "ESP8266");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(PLATFORM_ESP32, "ESP32");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(PLATFORM_RP2040, "RP2040");
 | 
			
		||||
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(NETWORK_WIFI, "wifi");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(NETWORK_ETHERNET, "ethernet");
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(NETWORK_THREAD, "thread");
 | 
			
		||||
// Wrap build-time defines into flash storage
 | 
			
		||||
MDNS_STATIC_CONST_CHAR(VALUE_VERSION, ESPHOME_VERSION);
 | 
			
		||||
 | 
			
		||||
void MDNSComponent::compile_records_() {
 | 
			
		||||
  this->hostname_ = App.get_name();
 | 
			
		||||
@@ -75,6 +43,15 @@ void MDNSComponent::compile_records_() {
 | 
			
		||||
  // in mdns/__init__.py. If you add a new service here, update both locations.
 | 
			
		||||
 | 
			
		||||
#ifdef USE_API
 | 
			
		||||
  MDNS_STATIC_CONST_CHAR(SERVICE_ESPHOMELIB, "_esphomelib");
 | 
			
		||||
  MDNS_STATIC_CONST_CHAR(TXT_FRIENDLY_NAME, "friendly_name");
 | 
			
		||||
  MDNS_STATIC_CONST_CHAR(TXT_VERSION, "version");
 | 
			
		||||
  MDNS_STATIC_CONST_CHAR(TXT_MAC, "mac");
 | 
			
		||||
  MDNS_STATIC_CONST_CHAR(TXT_PLATFORM, "platform");
 | 
			
		||||
  MDNS_STATIC_CONST_CHAR(TXT_BOARD, "board");
 | 
			
		||||
  MDNS_STATIC_CONST_CHAR(TXT_NETWORK, "network");
 | 
			
		||||
  MDNS_STATIC_CONST_CHAR(VALUE_BOARD, ESPHOME_BOARD);
 | 
			
		||||
 | 
			
		||||
  if (api::global_api_server != nullptr) {
 | 
			
		||||
    auto &service = this->services_.emplace_next();
 | 
			
		||||
    service.service_type = MDNS_STR(SERVICE_ESPHOMELIB);
 | 
			
		||||
@@ -109,52 +86,66 @@ void MDNSComponent::compile_records_() {
 | 
			
		||||
    txt_records.reserve(txt_count);
 | 
			
		||||
 | 
			
		||||
    if (!friendly_name_empty) {
 | 
			
		||||
      txt_records.push_back({MDNS_STR(TXT_FRIENDLY_NAME), friendly_name});
 | 
			
		||||
      txt_records.push_back({MDNS_STR(TXT_FRIENDLY_NAME), MDNS_STR(friendly_name.c_str())});
 | 
			
		||||
    }
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_VERSION), ESPHOME_VERSION});
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_MAC), get_mac_address()});
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_VERSION), MDNS_STR(VALUE_VERSION)});
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_MAC), MDNS_STR(this->add_dynamic_txt_value(get_mac_address()))});
 | 
			
		||||
 | 
			
		||||
#ifdef USE_ESP8266
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR_VALUE(PLATFORM_ESP8266)});
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(PLATFORM_ESP8266, "ESP8266");
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR(PLATFORM_ESP8266)});
 | 
			
		||||
#elif defined(USE_ESP32)
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR_VALUE(PLATFORM_ESP32)});
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(PLATFORM_ESP32, "ESP32");
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR(PLATFORM_ESP32)});
 | 
			
		||||
#elif defined(USE_RP2040)
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR_VALUE(PLATFORM_RP2040)});
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(PLATFORM_RP2040, "RP2040");
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR(PLATFORM_RP2040)});
 | 
			
		||||
#elif defined(USE_LIBRETINY)
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), lt_cpu_get_model_name()});
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR(lt_cpu_get_model_name())});
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_BOARD), ESPHOME_BOARD});
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_BOARD), MDNS_STR(VALUE_BOARD)});
 | 
			
		||||
 | 
			
		||||
#if defined(USE_WIFI)
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR_VALUE(NETWORK_WIFI)});
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(NETWORK_WIFI, "wifi");
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR(NETWORK_WIFI)});
 | 
			
		||||
#elif defined(USE_ETHERNET)
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR_VALUE(NETWORK_ETHERNET)});
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(NETWORK_ETHERNET, "ethernet");
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR(NETWORK_ETHERNET)});
 | 
			
		||||
#elif defined(USE_OPENTHREAD)
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR_VALUE(NETWORK_THREAD)});
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(NETWORK_THREAD, "thread");
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR(NETWORK_THREAD)});
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
#ifdef USE_API_NOISE
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION, "api_encryption");
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION_SUPPORTED, "api_encryption_supported");
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(NOISE_ENCRYPTION, "Noise_NNpsk0_25519_ChaChaPoly_SHA256");
 | 
			
		||||
    if (api::global_api_server->get_noise_ctx()->has_psk()) {
 | 
			
		||||
      txt_records.push_back({MDNS_STR(TXT_API_ENCRYPTION), MDNS_STR_VALUE(NOISE_ENCRYPTION)});
 | 
			
		||||
    } else {
 | 
			
		||||
      txt_records.push_back({MDNS_STR(TXT_API_ENCRYPTION_SUPPORTED), MDNS_STR_VALUE(NOISE_ENCRYPTION)});
 | 
			
		||||
    }
 | 
			
		||||
    bool has_psk = api::global_api_server->get_noise_ctx()->has_psk();
 | 
			
		||||
    const char *encryption_key = has_psk ? TXT_API_ENCRYPTION : TXT_API_ENCRYPTION_SUPPORTED;
 | 
			
		||||
    txt_records.push_back({MDNS_STR(encryption_key), MDNS_STR(NOISE_ENCRYPTION)});
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
#ifdef ESPHOME_PROJECT_NAME
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_PROJECT_NAME), ESPHOME_PROJECT_NAME});
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_PROJECT_VERSION), ESPHOME_PROJECT_VERSION});
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(TXT_PROJECT_NAME, "project_name");
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(TXT_PROJECT_VERSION, "project_version");
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(VALUE_PROJECT_NAME, ESPHOME_PROJECT_NAME);
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(VALUE_PROJECT_VERSION, ESPHOME_PROJECT_VERSION);
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_PROJECT_NAME), MDNS_STR(VALUE_PROJECT_NAME)});
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_PROJECT_VERSION), MDNS_STR(VALUE_PROJECT_VERSION)});
 | 
			
		||||
#endif  // ESPHOME_PROJECT_NAME
 | 
			
		||||
 | 
			
		||||
#ifdef USE_DASHBOARD_IMPORT
 | 
			
		||||
    txt_records.push_back({MDNS_STR(TXT_PACKAGE_IMPORT_URL), dashboard_import::get_package_import_url()});
 | 
			
		||||
    MDNS_STATIC_CONST_CHAR(TXT_PACKAGE_IMPORT_URL, "package_import_url");
 | 
			
		||||
    txt_records.push_back(
 | 
			
		||||
        {MDNS_STR(TXT_PACKAGE_IMPORT_URL), MDNS_STR(dashboard_import::get_package_import_url().c_str())});
 | 
			
		||||
#endif
 | 
			
		||||
  }
 | 
			
		||||
#endif  // USE_API
 | 
			
		||||
 | 
			
		||||
#ifdef USE_PROMETHEUS
 | 
			
		||||
  MDNS_STATIC_CONST_CHAR(SERVICE_PROMETHEUS, "_prometheus-http");
 | 
			
		||||
 | 
			
		||||
  auto &prom_service = this->services_.emplace_next();
 | 
			
		||||
  prom_service.service_type = MDNS_STR(SERVICE_PROMETHEUS);
 | 
			
		||||
  prom_service.proto = MDNS_STR(SERVICE_TCP);
 | 
			
		||||
@@ -162,6 +153,8 @@ void MDNSComponent::compile_records_() {
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
#ifdef USE_WEBSERVER
 | 
			
		||||
  MDNS_STATIC_CONST_CHAR(SERVICE_HTTP, "_http");
 | 
			
		||||
 | 
			
		||||
  auto &web_service = this->services_.emplace_next();
 | 
			
		||||
  web_service.service_type = MDNS_STR(SERVICE_HTTP);
 | 
			
		||||
  web_service.proto = MDNS_STR(SERVICE_TCP);
 | 
			
		||||
@@ -169,13 +162,16 @@ void MDNSComponent::compile_records_() {
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
#if !defined(USE_API) && !defined(USE_PROMETHEUS) && !defined(USE_WEBSERVER) && !defined(USE_MDNS_EXTRA_SERVICES)
 | 
			
		||||
  MDNS_STATIC_CONST_CHAR(SERVICE_HTTP, "_http");
 | 
			
		||||
  MDNS_STATIC_CONST_CHAR(TXT_VERSION, "version");
 | 
			
		||||
 | 
			
		||||
  // Publish "http" service if not using native API or any other services
 | 
			
		||||
  // This is just to have *some* mDNS service so that .local resolution works
 | 
			
		||||
  auto &fallback_service = this->services_.emplace_next();
 | 
			
		||||
  fallback_service.service_type = MDNS_STR(SERVICE_HTTP);
 | 
			
		||||
  fallback_service.proto = MDNS_STR(SERVICE_TCP);
 | 
			
		||||
  fallback_service.port = USE_WEBSERVER_PORT;
 | 
			
		||||
  fallback_service.txt_records.push_back({MDNS_STR(TXT_VERSION), ESPHOME_VERSION});
 | 
			
		||||
  fallback_service.txt_records.push_back({MDNS_STR(TXT_VERSION), MDNS_STR(VALUE_VERSION)});
 | 
			
		||||
#endif
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
@@ -190,8 +186,7 @@ void MDNSComponent::dump_config() {
 | 
			
		||||
    ESP_LOGV(TAG, "  - %s, %s, %d", MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto),
 | 
			
		||||
             const_cast<TemplatableValue<uint16_t> &>(service.port).value());
 | 
			
		||||
    for (const auto &record : service.txt_records) {
 | 
			
		||||
      ESP_LOGV(TAG, "    TXT: %s = %s", MDNS_STR_ARG(record.key),
 | 
			
		||||
               const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
 | 
			
		||||
      ESP_LOGV(TAG, "    TXT: %s = %s", MDNS_STR_ARG(record.key), MDNS_STR_ARG(record.value));
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
#endif
 | 
			
		||||
 
 | 
			
		||||
@@ -27,7 +27,7 @@ struct MDNSString;
 | 
			
		||||
 | 
			
		||||
struct MDNSTXTRecord {
 | 
			
		||||
  const MDNSString *key;
 | 
			
		||||
  TemplatableValue<std::string> value;
 | 
			
		||||
  const MDNSString *value;
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
struct MDNSService {
 | 
			
		||||
@@ -59,6 +59,17 @@ class MDNSComponent : public Component {
 | 
			
		||||
 | 
			
		||||
  void on_shutdown() override;
 | 
			
		||||
 | 
			
		||||
  /// Add a dynamic TXT value and return pointer to it for use in MDNSTXTRecord
 | 
			
		||||
  const char *add_dynamic_txt_value(const std::string &value) {
 | 
			
		||||
    this->dynamic_txt_values_.push_back(value);
 | 
			
		||||
    return this->dynamic_txt_values_[this->dynamic_txt_values_.size() - 1].c_str();
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  /// Storage for runtime-generated TXT values (MAC address, user lambdas)
 | 
			
		||||
  /// Pre-sized at compile time via MDNS_DYNAMIC_TXT_COUNT to avoid heap allocations.
 | 
			
		||||
  /// Static/compile-time values (version, board, etc.) are stored directly in flash and don't use this.
 | 
			
		||||
  StaticVector<std::string, MDNS_DYNAMIC_TXT_COUNT> dynamic_txt_values_;
 | 
			
		||||
 | 
			
		||||
 protected:
 | 
			
		||||
  StaticVector<MDNSService, MDNS_SERVICE_COUNT> services_{};
 | 
			
		||||
  std::string hostname_;
 | 
			
		||||
 
 | 
			
		||||
@@ -2,7 +2,6 @@
 | 
			
		||||
#if defined(USE_ESP32) && defined(USE_MDNS)
 | 
			
		||||
 | 
			
		||||
#include <mdns.h>
 | 
			
		||||
#include <cstring>
 | 
			
		||||
#include "esphome/core/hal.h"
 | 
			
		||||
#include "esphome/core/log.h"
 | 
			
		||||
#include "mdns_component.h"
 | 
			
		||||
@@ -29,21 +28,16 @@ void MDNSComponent::setup() {
 | 
			
		||||
    std::vector<mdns_txt_item_t> txt_records;
 | 
			
		||||
    for (const auto &record : service.txt_records) {
 | 
			
		||||
      mdns_txt_item_t it{};
 | 
			
		||||
      // key is a compile-time string literal in flash, no need to strdup
 | 
			
		||||
      // key and value are either compile-time string literals in flash or pointers to dynamic_txt_values_
 | 
			
		||||
      // Both remain valid for the lifetime of this function, and ESP-IDF makes internal copies
 | 
			
		||||
      it.key = MDNS_STR_ARG(record.key);
 | 
			
		||||
      // value is a temporary from TemplatableValue, must strdup to keep it alive
 | 
			
		||||
      it.value = strdup(const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
 | 
			
		||||
      it.value = MDNS_STR_ARG(record.value);
 | 
			
		||||
      txt_records.push_back(it);
 | 
			
		||||
    }
 | 
			
		||||
    uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
 | 
			
		||||
    err = mdns_service_add(nullptr, MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto), port,
 | 
			
		||||
                           txt_records.data(), txt_records.size());
 | 
			
		||||
 | 
			
		||||
    // free records
 | 
			
		||||
    for (const auto &it : txt_records) {
 | 
			
		||||
      free((void *) it.value);  // NOLINT(cppcoreguidelines-no-malloc)
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    if (err != ESP_OK) {
 | 
			
		||||
      ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err));
 | 
			
		||||
    }
 | 
			
		||||
 
 | 
			
		||||
@@ -33,7 +33,7 @@ void MDNSComponent::setup() {
 | 
			
		||||
    MDNS.addService(FPSTR(service_type), FPSTR(proto), port);
 | 
			
		||||
    for (const auto &record : service.txt_records) {
 | 
			
		||||
      MDNS.addServiceTxt(FPSTR(service_type), FPSTR(proto), FPSTR(MDNS_STR_ARG(record.key)),
 | 
			
		||||
                         const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
 | 
			
		||||
                         FPSTR(MDNS_STR_ARG(record.value)));
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -32,8 +32,7 @@ void MDNSComponent::setup() {
 | 
			
		||||
    uint16_t port_ = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
 | 
			
		||||
    MDNS.addService(service_type, proto, port_);
 | 
			
		||||
    for (const auto &record : service.txt_records) {
 | 
			
		||||
      MDNS.addServiceTxt(service_type, proto, MDNS_STR_ARG(record.key),
 | 
			
		||||
                         const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
 | 
			
		||||
      MDNS.addServiceTxt(service_type, proto, MDNS_STR_ARG(record.key), MDNS_STR_ARG(record.value));
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -32,8 +32,7 @@ void MDNSComponent::setup() {
 | 
			
		||||
    uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
 | 
			
		||||
    MDNS.addService(service_type, proto, port);
 | 
			
		||||
    for (const auto &record : service.txt_records) {
 | 
			
		||||
      MDNS.addServiceTxt(service_type, proto, MDNS_STR_ARG(record.key),
 | 
			
		||||
                         const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
 | 
			
		||||
      MDNS.addServiceTxt(service_type, proto, MDNS_STR_ARG(record.key), MDNS_STR_ARG(record.value));
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 
 | 
			
		||||
@@ -7,7 +7,7 @@
 | 
			
		||||
 | 
			
		||||
#include "opentherm.h"
 | 
			
		||||
#include "esphome/core/helpers.h"
 | 
			
		||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
#include "driver/timer.h"
 | 
			
		||||
#include "esp_err.h"
 | 
			
		||||
#endif
 | 
			
		||||
@@ -31,7 +31,7 @@ OpenTherm *OpenTherm::instance = nullptr;
 | 
			
		||||
OpenTherm::OpenTherm(InternalGPIOPin *in_pin, InternalGPIOPin *out_pin, int32_t device_timeout)
 | 
			
		||||
    : in_pin_(in_pin),
 | 
			
		||||
      out_pin_(out_pin),
 | 
			
		||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
      timer_group_(TIMER_GROUP_0),
 | 
			
		||||
      timer_idx_(TIMER_0),
 | 
			
		||||
#endif
 | 
			
		||||
@@ -57,7 +57,7 @@ bool OpenTherm::initialize() {
 | 
			
		||||
  this->out_pin_->setup();
 | 
			
		||||
  this->out_pin_->digital_write(true);
 | 
			
		||||
 | 
			
		||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
  return this->init_esp32_timer_();
 | 
			
		||||
#else
 | 
			
		||||
  return true;
 | 
			
		||||
@@ -238,7 +238,7 @@ void IRAM_ATTR OpenTherm::write_bit_(uint8_t high, uint8_t clock) {
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
 | 
			
		||||
bool OpenTherm::init_esp32_timer_() {
 | 
			
		||||
  // Search for a free timer. Maybe unstable, we'll see.
 | 
			
		||||
@@ -365,7 +365,7 @@ void IRAM_ATTR OpenTherm::stop_timer_() {
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#endif  // END ESP32
 | 
			
		||||
#endif  // USE_ESP32
 | 
			
		||||
 | 
			
		||||
#ifdef ESP8266
 | 
			
		||||
// 5 kHz timer_
 | 
			
		||||
 
 | 
			
		||||
@@ -12,7 +12,7 @@
 | 
			
		||||
#include "esphome/core/helpers.h"
 | 
			
		||||
#include "esphome/core/log.h"
 | 
			
		||||
 | 
			
		||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
#include "driver/timer.h"
 | 
			
		||||
#endif
 | 
			
		||||
 | 
			
		||||
@@ -356,7 +356,7 @@ class OpenTherm {
 | 
			
		||||
  ISRInternalGPIOPin isr_in_pin_;
 | 
			
		||||
  ISRInternalGPIOPin isr_out_pin_;
 | 
			
		||||
 | 
			
		||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
  timer_group_t timer_group_;
 | 
			
		||||
  timer_idx_t timer_idx_;
 | 
			
		||||
#endif
 | 
			
		||||
@@ -370,7 +370,7 @@ class OpenTherm {
 | 
			
		||||
  int32_t timeout_counter_;  // <0 no timeout
 | 
			
		||||
  int32_t device_timeout_;
 | 
			
		||||
 | 
			
		||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
			
		||||
#ifdef USE_ESP32
 | 
			
		||||
  esp_err_t timer_error_ = ESP_OK;
 | 
			
		||||
  TimerErrorType timer_error_type_ = TimerErrorType::NO_TIMER_ERROR;
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -180,10 +180,12 @@ void OpenThreadSrpComponent::setup() {
 | 
			
		||||
    entry->mService.mNumTxtEntries = service.txt_records.size();
 | 
			
		||||
    for (size_t i = 0; i < service.txt_records.size(); i++) {
 | 
			
		||||
      const auto &txt = service.txt_records[i];
 | 
			
		||||
      auto value = const_cast<TemplatableValue<std::string> &>(txt.value).value();
 | 
			
		||||
      // Value is either a compile-time string literal in flash or a pointer to dynamic_txt_values_
 | 
			
		||||
      // OpenThread SRP client expects the data to persist, so we strdup it
 | 
			
		||||
      const char *value_str = MDNS_STR_ARG(txt.value);
 | 
			
		||||
      txt_entries[i].mKey = MDNS_STR_ARG(txt.key);
 | 
			
		||||
      txt_entries[i].mValue = reinterpret_cast<const uint8_t *>(strdup(value.c_str()));
 | 
			
		||||
      txt_entries[i].mValueLength = value.size();
 | 
			
		||||
      txt_entries[i].mValue = reinterpret_cast<const uint8_t *>(strdup(value_str));
 | 
			
		||||
      txt_entries[i].mValueLength = strlen(value_str);
 | 
			
		||||
    }
 | 
			
		||||
    entry->mService.mTxtEntries = txt_entries;
 | 
			
		||||
    entry->mService.mNumTxtEntries = service.txt_records.size();
 | 
			
		||||
 
 | 
			
		||||
@@ -347,7 +347,7 @@ def final_validate_device_schema(
 | 
			
		||||
 | 
			
		||||
    def validate_pin(opt, device):
 | 
			
		||||
        def validator(value):
 | 
			
		||||
            if opt in device:
 | 
			
		||||
            if opt in device and not CORE.testing_mode:
 | 
			
		||||
                raise cv.Invalid(
 | 
			
		||||
                    f"The uart {opt} is used both by {name} and {device[opt]}, "
 | 
			
		||||
                    f"but can only be used by one. Please create a new uart bus for {name}."
 | 
			
		||||
 
 | 
			
		||||
@@ -552,7 +552,7 @@ void WiFiComponent::start_scanning() {
 | 
			
		||||
// Using insertion sort instead of std::stable_sort saves flash memory
 | 
			
		||||
// by avoiding template instantiations (std::rotate, std::stable_sort, lambdas)
 | 
			
		||||
// IMPORTANT: This sort is stable (preserves relative order of equal elements)
 | 
			
		||||
static void insertion_sort_scan_results(std::vector<WiFiScanResult> &results) {
 | 
			
		||||
static void insertion_sort_scan_results(FixedVector<WiFiScanResult> &results) {
 | 
			
		||||
  const size_t size = results.size();
 | 
			
		||||
  for (size_t i = 1; i < size; i++) {
 | 
			
		||||
    // Make a copy to avoid issues with move semantics during comparison
 | 
			
		||||
 
 | 
			
		||||
@@ -278,7 +278,7 @@ class WiFiComponent : public Component {
 | 
			
		||||
  std::string get_use_address() const;
 | 
			
		||||
  void set_use_address(const std::string &use_address);
 | 
			
		||||
 | 
			
		||||
  const std::vector<WiFiScanResult> &get_scan_result() const { return scan_result_; }
 | 
			
		||||
  const FixedVector<WiFiScanResult> &get_scan_result() const { return scan_result_; }
 | 
			
		||||
 | 
			
		||||
  network::IPAddress wifi_soft_ap_ip();
 | 
			
		||||
 | 
			
		||||
@@ -385,7 +385,7 @@ class WiFiComponent : public Component {
 | 
			
		||||
  std::string use_address_;
 | 
			
		||||
  std::vector<WiFiAP> sta_;
 | 
			
		||||
  std::vector<WiFiSTAPriority> sta_priorities_;
 | 
			
		||||
  std::vector<WiFiScanResult> scan_result_;
 | 
			
		||||
  FixedVector<WiFiScanResult> scan_result_;
 | 
			
		||||
  WiFiAP selected_ap_;
 | 
			
		||||
  WiFiAP ap_;
 | 
			
		||||
  optional<float> output_power_;
 | 
			
		||||
 
 | 
			
		||||
@@ -696,7 +696,15 @@ void WiFiComponent::wifi_scan_done_callback_(void *arg, STATUS status) {
 | 
			
		||||
    this->retry_connect();
 | 
			
		||||
    return;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // Count the number of results first
 | 
			
		||||
  auto *head = reinterpret_cast<bss_info *>(arg);
 | 
			
		||||
  size_t count = 0;
 | 
			
		||||
  for (bss_info *it = head; it != nullptr; it = STAILQ_NEXT(it, next)) {
 | 
			
		||||
    count++;
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  this->scan_result_.init(count);
 | 
			
		||||
  for (bss_info *it = head; it != nullptr; it = STAILQ_NEXT(it, next)) {
 | 
			
		||||
    WiFiScanResult res({it->bssid[0], it->bssid[1], it->bssid[2], it->bssid[3], it->bssid[4], it->bssid[5]},
 | 
			
		||||
                       std::string(reinterpret_cast<char *>(it->ssid), it->ssid_len), it->channel, it->rssi,
 | 
			
		||||
 
 | 
			
		||||
@@ -763,8 +763,9 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
 | 
			
		||||
    const auto &it = data->data.sta_scan_done;
 | 
			
		||||
    ESP_LOGV(TAG, "Scan done: status=%" PRIu32 " number=%u scan_id=%u", it.status, it.number, it.scan_id);
 | 
			
		||||
 | 
			
		||||
    scan_result_.clear();
 | 
			
		||||
    this->scan_done_ = true;
 | 
			
		||||
    scan_result_.clear();
 | 
			
		||||
 | 
			
		||||
    if (it.status != 0) {
 | 
			
		||||
      // scan error
 | 
			
		||||
      return;
 | 
			
		||||
@@ -784,7 +785,7 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) {
 | 
			
		||||
    }
 | 
			
		||||
    records.resize(number);
 | 
			
		||||
 | 
			
		||||
    scan_result_.reserve(number);
 | 
			
		||||
    scan_result_.init(number);
 | 
			
		||||
    for (int i = 0; i < number; i++) {
 | 
			
		||||
      auto &record = records[i];
 | 
			
		||||
      bssid_t bssid;
 | 
			
		||||
 
 | 
			
		||||
@@ -411,7 +411,7 @@ void WiFiComponent::wifi_scan_done_callback_() {
 | 
			
		||||
  if (num < 0)
 | 
			
		||||
    return;
 | 
			
		||||
 | 
			
		||||
  this->scan_result_.reserve(static_cast<unsigned int>(num));
 | 
			
		||||
  this->scan_result_.init(static_cast<unsigned int>(num));
 | 
			
		||||
  for (int i = 0; i < num; i++) {
 | 
			
		||||
    String ssid = WiFi.SSID(i);
 | 
			
		||||
    wifi_auth_mode_t authmode = WiFi.encryptionType(i);
 | 
			
		||||
 
 | 
			
		||||
@@ -4,7 +4,7 @@ from enum import Enum
 | 
			
		||||
 | 
			
		||||
from esphome.enum import StrEnum
 | 
			
		||||
 | 
			
		||||
__version__ = "2025.10.0b1"
 | 
			
		||||
__version__ = "2025.11.0-dev"
 | 
			
		||||
 | 
			
		||||
ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
 | 
			
		||||
VALID_SUBSTITUTIONS_CHARACTERS = (
 | 
			
		||||
 
 | 
			
		||||
@@ -529,6 +529,8 @@ class EsphomeCore:
 | 
			
		||||
        self.dashboard = False
 | 
			
		||||
        # True if command is run from vscode api
 | 
			
		||||
        self.vscode = False
 | 
			
		||||
        # True if running in testing mode (disables validation checks for grouped testing)
 | 
			
		||||
        self.testing_mode = False
 | 
			
		||||
        # The name of the node
 | 
			
		||||
        self.name: str | None = None
 | 
			
		||||
        # The friendly name of the node
 | 
			
		||||
 
 | 
			
		||||
@@ -340,8 +340,8 @@ void Application::calculate_looping_components_() {
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // Pre-reserve vector to avoid reallocations
 | 
			
		||||
  this->looping_components_.reserve(total_looping);
 | 
			
		||||
  // Initialize FixedVector with exact size - no reallocation possible
 | 
			
		||||
  this->looping_components_.init(total_looping);
 | 
			
		||||
 | 
			
		||||
  // Add all components with loop override that aren't already LOOP_DONE
 | 
			
		||||
  // Some components (like logger) may call disable_loop() during initialization
 | 
			
		||||
 
 | 
			
		||||
@@ -472,7 +472,7 @@ class Application {
 | 
			
		||||
  // - When a component is enabled, it's swapped with the first inactive component
 | 
			
		||||
  //   and active_end_ is incremented
 | 
			
		||||
  // - This eliminates branch mispredictions from flag checking in the hot loop
 | 
			
		||||
  std::vector<Component *> looping_components_{};
 | 
			
		||||
  FixedVector<Component *> looping_components_{};
 | 
			
		||||
#ifdef USE_SOCKET_SELECT_SUPPORT
 | 
			
		||||
  std::vector<int> socket_fds_;  // Vector of all monitored socket file descriptors
 | 
			
		||||
#endif
 | 
			
		||||
 
 | 
			
		||||
@@ -84,6 +84,7 @@
 | 
			
		||||
#define USE_LVGL_TOUCHSCREEN
 | 
			
		||||
#define USE_MDNS
 | 
			
		||||
#define MDNS_SERVICE_COUNT 3
 | 
			
		||||
#define MDNS_DYNAMIC_TXT_COUNT 3
 | 
			
		||||
#define USE_MEDIA_PLAYER
 | 
			
		||||
#define USE_NEXTION_TFT_UPLOAD
 | 
			
		||||
#define USE_NUMBER
 | 
			
		||||
@@ -174,6 +175,8 @@
 | 
			
		||||
#define USE_ESP32_BLE_SERVER_DESCRIPTOR_ON_WRITE
 | 
			
		||||
#define USE_ESP32_BLE_SERVER_ON_CONNECT
 | 
			
		||||
#define USE_ESP32_BLE_SERVER_ON_DISCONNECT
 | 
			
		||||
#define ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT 1
 | 
			
		||||
#define ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT 1
 | 
			
		||||
#define USE_ESP32_CAMERA_JPEG_ENCODER
 | 
			
		||||
#define USE_I2C
 | 
			
		||||
#define USE_IMPROV
 | 
			
		||||
 
 | 
			
		||||
@@ -246,6 +246,9 @@ def entity_duplicate_validator(platform: str) -> Callable[[ConfigType], ConfigTy
 | 
			
		||||
                    "\n          to distinguish them"
 | 
			
		||||
                )
 | 
			
		||||
 | 
			
		||||
            # Skip duplicate entity name validation when testing_mode is enabled
 | 
			
		||||
            # This flag is used for grouped component testing
 | 
			
		||||
            if not CORE.testing_mode:
 | 
			
		||||
                raise cv.Invalid(
 | 
			
		||||
                    f"Duplicate {platform} entity with name '{entity_name}' found{device_prefix}. "
 | 
			
		||||
                    f"{conflict_msg}. "
 | 
			
		||||
 
 | 
			
		||||
@@ -159,6 +159,80 @@ template<typename T, size_t N> class StaticVector {
 | 
			
		||||
  const_reverse_iterator rend() const { return const_reverse_iterator(begin()); }
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
/// Fixed-capacity vector - allocates once at runtime, never reallocates
 | 
			
		||||
/// This avoids std::vector template overhead (_M_realloc_insert, _M_default_append)
 | 
			
		||||
/// when size is known at initialization but not at compile time
 | 
			
		||||
template<typename T> class FixedVector {
 | 
			
		||||
 private:
 | 
			
		||||
  T *data_{nullptr};
 | 
			
		||||
  size_t size_{0};
 | 
			
		||||
  size_t capacity_{0};
 | 
			
		||||
 | 
			
		||||
  // Helper to destroy elements and free memory
 | 
			
		||||
  void cleanup_() {
 | 
			
		||||
    if (data_ != nullptr) {
 | 
			
		||||
      // Manually destroy all elements
 | 
			
		||||
      for (size_t i = 0; i < size_; i++) {
 | 
			
		||||
        data_[i].~T();
 | 
			
		||||
      }
 | 
			
		||||
      // Free raw memory
 | 
			
		||||
      ::operator delete(data_);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
 public:
 | 
			
		||||
  FixedVector() = default;
 | 
			
		||||
 | 
			
		||||
  ~FixedVector() { cleanup_(); }
 | 
			
		||||
 | 
			
		||||
  // Disable copy to avoid accidental copies
 | 
			
		||||
  FixedVector(const FixedVector &) = delete;
 | 
			
		||||
  FixedVector &operator=(const FixedVector &) = delete;
 | 
			
		||||
 | 
			
		||||
  // Allocate capacity - can be called multiple times to reinit
 | 
			
		||||
  void init(size_t n) {
 | 
			
		||||
    cleanup_();
 | 
			
		||||
    data_ = nullptr;
 | 
			
		||||
    capacity_ = 0;
 | 
			
		||||
    size_ = 0;
 | 
			
		||||
    if (n > 0) {
 | 
			
		||||
      // Allocate raw memory without calling constructors
 | 
			
		||||
      data_ = static_cast<T *>(::operator new(n * sizeof(T)));
 | 
			
		||||
      capacity_ = n;
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  // Clear the vector (reset size to 0, keep capacity)
 | 
			
		||||
  void clear() { size_ = 0; }
 | 
			
		||||
 | 
			
		||||
  // Check if vector is empty
 | 
			
		||||
  bool empty() const { return size_ == 0; }
 | 
			
		||||
 | 
			
		||||
  /// Add element without bounds checking
 | 
			
		||||
  /// Caller must ensure sufficient capacity was allocated via init()
 | 
			
		||||
  /// Silently ignores pushes beyond capacity (no exception or assertion)
 | 
			
		||||
  void push_back(const T &value) {
 | 
			
		||||
    if (size_ < capacity_) {
 | 
			
		||||
      // Use placement new to construct the object in pre-allocated memory
 | 
			
		||||
      new (&data_[size_]) T(value);
 | 
			
		||||
      size_++;
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  size_t size() const { return size_; }
 | 
			
		||||
 | 
			
		||||
  /// Access element without bounds checking (matches std::vector behavior)
 | 
			
		||||
  /// Caller must ensure index is valid (i < size())
 | 
			
		||||
  T &operator[](size_t i) { return data_[i]; }
 | 
			
		||||
  const T &operator[](size_t i) const { return data_[i]; }
 | 
			
		||||
 | 
			
		||||
  // Iterator support for range-based for loops
 | 
			
		||||
  T *begin() { return data_; }
 | 
			
		||||
  T *end() { return data_ + size_; }
 | 
			
		||||
  const T *begin() const { return data_; }
 | 
			
		||||
  const T *end() const { return data_ + size_; }
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
///@}
 | 
			
		||||
 | 
			
		||||
/// @name Mathematics
 | 
			
		||||
 
 | 
			
		||||
@@ -118,11 +118,11 @@ class PinRegistry(dict):
 | 
			
		||||
                        parent_config = fconf.get_config_for_path(parent_path)
 | 
			
		||||
                        final_val_fun(pin_config, parent_config)
 | 
			
		||||
                    allow_others = pin_config.get(CONF_ALLOW_OTHER_USES, False)
 | 
			
		||||
                    if count != 1 and not allow_others:
 | 
			
		||||
                    if count != 1 and not allow_others and not CORE.testing_mode:
 | 
			
		||||
                        raise cv.Invalid(
 | 
			
		||||
                            f"Pin {pin_config[CONF_NUMBER]} is used in multiple places"
 | 
			
		||||
                        )
 | 
			
		||||
                    if count == 1 and allow_others:
 | 
			
		||||
                    if count == 1 and allow_others and not CORE.testing_mode:
 | 
			
		||||
                        raise cv.Invalid(
 | 
			
		||||
                            f"Pin {pin_config[CONF_NUMBER]} incorrectly sets {CONF_ALLOW_OTHER_USES}: true"
 | 
			
		||||
                        )
 | 
			
		||||
 
 | 
			
		||||
@@ -5,6 +5,7 @@ import os
 | 
			
		||||
from pathlib import Path
 | 
			
		||||
import re
 | 
			
		||||
import subprocess
 | 
			
		||||
from typing import Any
 | 
			
		||||
 | 
			
		||||
from esphome.const import CONF_COMPILE_PROCESS_LIMIT, CONF_ESPHOME, KEY_CORE
 | 
			
		||||
from esphome.core import CORE, EsphomeError
 | 
			
		||||
@@ -42,6 +43,35 @@ def patch_structhash():
 | 
			
		||||
    cli.clean_build_dir = patched_clean_build_dir
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def patch_file_downloader():
 | 
			
		||||
    """Patch PlatformIO's FileDownloader to retry on PackageException errors."""
 | 
			
		||||
    from platformio.package.download import FileDownloader
 | 
			
		||||
    from platformio.package.exception import PackageException
 | 
			
		||||
 | 
			
		||||
    original_init = FileDownloader.__init__
 | 
			
		||||
 | 
			
		||||
    def patched_init(self, *args: Any, **kwargs: Any) -> None:
 | 
			
		||||
        max_retries = 3
 | 
			
		||||
 | 
			
		||||
        for attempt in range(max_retries):
 | 
			
		||||
            try:
 | 
			
		||||
                return original_init(self, *args, **kwargs)
 | 
			
		||||
            except PackageException as e:
 | 
			
		||||
                if attempt < max_retries - 1:
 | 
			
		||||
                    _LOGGER.warning(
 | 
			
		||||
                        "Package download failed: %s. Retrying... (attempt %d/%d)",
 | 
			
		||||
                        str(e),
 | 
			
		||||
                        attempt + 1,
 | 
			
		||||
                        max_retries,
 | 
			
		||||
                    )
 | 
			
		||||
                else:
 | 
			
		||||
                    # Final attempt - re-raise
 | 
			
		||||
                    raise
 | 
			
		||||
        return None
 | 
			
		||||
 | 
			
		||||
    FileDownloader.__init__ = patched_init
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
IGNORE_LIB_WARNINGS = f"(?:{'|'.join(['Hash', 'Update'])})"
 | 
			
		||||
FILTER_PLATFORMIO_LINES = [
 | 
			
		||||
    r"Verbose mode can be enabled via `-v, --verbose` option.*",
 | 
			
		||||
@@ -99,6 +129,7 @@ def run_platformio_cli(*args, **kwargs) -> str | int:
 | 
			
		||||
    import platformio.__main__
 | 
			
		||||
 | 
			
		||||
    patch_structhash()
 | 
			
		||||
    patch_file_downloader()
 | 
			
		||||
    return run_external_command(platformio.__main__.main, *cmd, **kwargs)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 
 | 
			
		||||
@@ -11,7 +11,7 @@ pyserial==3.5
 | 
			
		||||
platformio==6.1.18  # When updating platformio, also update /docker/Dockerfile
 | 
			
		||||
esptool==5.1.0
 | 
			
		||||
click==8.1.7
 | 
			
		||||
esphome-dashboard==20250904.0
 | 
			
		||||
esphome-dashboard==20251009.0
 | 
			
		||||
aioesphomeapi==41.13.0
 | 
			
		||||
zeroconf==0.148.0
 | 
			
		||||
puremagic==1.30
 | 
			
		||||
 
 | 
			
		||||
@@ -1,7 +1,7 @@
 | 
			
		||||
pylint==3.3.9
 | 
			
		||||
flake8==7.3.0  # also change in .pre-commit-config.yaml when updating
 | 
			
		||||
ruff==0.14.0  # also change in .pre-commit-config.yaml when updating
 | 
			
		||||
pyupgrade==3.20.0  # also change in .pre-commit-config.yaml when updating
 | 
			
		||||
pyupgrade==3.21.0  # also change in .pre-commit-config.yaml when updating
 | 
			
		||||
pre-commit
 | 
			
		||||
 | 
			
		||||
# Unit tests
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										523
									
								
								script/analyze_component_buses.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										523
									
								
								script/analyze_component_buses.py
									
									
									
									
									
										Executable file
									
								
							@@ -0,0 +1,523 @@
 | 
			
		||||
#!/usr/bin/env python3
 | 
			
		||||
"""Analyze component test files to detect which common bus configs they use.
 | 
			
		||||
 | 
			
		||||
This script scans component test files and extracts which common bus configurations
 | 
			
		||||
(i2c, spi, uart, etc.) are included via the packages mechanism. This information
 | 
			
		||||
is used to group components that can be tested together.
 | 
			
		||||
 | 
			
		||||
Components can only be grouped together if they use the EXACT SAME set of common
 | 
			
		||||
bus configurations, ensuring that merged configs are compatible.
 | 
			
		||||
 | 
			
		||||
Example output:
 | 
			
		||||
{
 | 
			
		||||
    "component1": {
 | 
			
		||||
        "esp32-ard": ["i2c", "uart_19200"],
 | 
			
		||||
        "esp32-idf": ["i2c", "uart_19200"]
 | 
			
		||||
    },
 | 
			
		||||
    "component2": {
 | 
			
		||||
        "esp32-ard": ["spi"],
 | 
			
		||||
        "esp32-idf": ["spi"]
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import argparse
 | 
			
		||||
from functools import lru_cache
 | 
			
		||||
import json
 | 
			
		||||
from pathlib import Path
 | 
			
		||||
import re
 | 
			
		||||
import sys
 | 
			
		||||
from typing import Any
 | 
			
		||||
 | 
			
		||||
# Add esphome to path
 | 
			
		||||
sys.path.insert(0, str(Path(__file__).parent.parent))
 | 
			
		||||
 | 
			
		||||
from esphome import yaml_util
 | 
			
		||||
from esphome.config_helpers import Extend, Remove
 | 
			
		||||
 | 
			
		||||
# Path to common bus configs
 | 
			
		||||
COMMON_BUS_PATH = Path("tests/test_build_components/common")
 | 
			
		||||
 | 
			
		||||
# Package dependencies - maps packages to the packages they include
 | 
			
		||||
# When a component uses a package on the left, it automatically gets
 | 
			
		||||
# the packages on the right as well
 | 
			
		||||
PACKAGE_DEPENDENCIES = {
 | 
			
		||||
    "modbus": ["uart"],  # modbus packages include uart packages
 | 
			
		||||
    # Add more package dependencies here as needed
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
# Bus types that can be defined directly in config files
 | 
			
		||||
# Components defining these directly cannot be grouped (they create unique bus IDs)
 | 
			
		||||
DIRECT_BUS_TYPES = ("i2c", "spi", "uart", "modbus")
 | 
			
		||||
 | 
			
		||||
# Signature for components with no bus requirements
 | 
			
		||||
# These components can be merged with any other group
 | 
			
		||||
NO_BUSES_SIGNATURE = "no_buses"
 | 
			
		||||
 | 
			
		||||
# Base bus components - these ARE the bus implementations and should not
 | 
			
		||||
# be flagged as needing migration since they are the platform/base components
 | 
			
		||||
BASE_BUS_COMPONENTS = {
 | 
			
		||||
    "i2c",
 | 
			
		||||
    "spi",
 | 
			
		||||
    "uart",
 | 
			
		||||
    "modbus",
 | 
			
		||||
    "canbus",
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
# Components that must be tested in isolation (not grouped or batched with others)
 | 
			
		||||
# These have known build issues that prevent grouping
 | 
			
		||||
# NOTE: This should be kept in sync with both test_build_components and split_components_for_ci.py
 | 
			
		||||
ISOLATED_COMPONENTS = {
 | 
			
		||||
    "animation": "Has display lambda in common.yaml that requires existing display platform - breaks when merged without display",
 | 
			
		||||
    "esphome": "Defines devices/areas in esphome: section that are referenced in other sections - breaks when merged",
 | 
			
		||||
    "ethernet": "Defines ethernet: which conflicts with wifi: used by most components",
 | 
			
		||||
    "ethernet_info": "Related to ethernet component which conflicts with wifi",
 | 
			
		||||
    "lvgl": "Defines multiple SDL displays on host platform that conflict when merged with other display configs",
 | 
			
		||||
    "matrix_keypad": "Needs isolation due to keypad",
 | 
			
		||||
    "mcp4725": "no YAML config to specify i2c bus id",
 | 
			
		||||
    "mcp47a1": "no YAML config to specify i2c bus id",
 | 
			
		||||
    "modbus_controller": "Defines multiple modbus buses for testing client/server functionality - conflicts with package modbus bus",
 | 
			
		||||
    "neopixelbus": "RMT type conflict with ESP32 Arduino/ESP-IDF headers (enum vs struct rmt_channel_t)",
 | 
			
		||||
    "packages": "cannot merge packages",
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@lru_cache(maxsize=1)
 | 
			
		||||
def get_common_bus_packages() -> frozenset[str]:
 | 
			
		||||
    """Get the list of common bus package names.
 | 
			
		||||
 | 
			
		||||
    Reads from tests/test_build_components/common/ directory
 | 
			
		||||
    and caches the result. All bus types support component grouping
 | 
			
		||||
    for config validation since --testing-mode bypasses runtime conflicts.
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        Frozenset of common bus package names (i2c, spi, uart, etc.)
 | 
			
		||||
    """
 | 
			
		||||
    if not COMMON_BUS_PATH.exists():
 | 
			
		||||
        return frozenset()
 | 
			
		||||
 | 
			
		||||
    # List all directories in common/ - these are the bus package names
 | 
			
		||||
    return frozenset(d.name for d in COMMON_BUS_PATH.iterdir() if d.is_dir())
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def uses_local_file_references(component_dir: Path) -> bool:
 | 
			
		||||
    """Check if a component uses local file references via $component_dir.
 | 
			
		||||
 | 
			
		||||
    Components that reference local files cannot be grouped because each needs
 | 
			
		||||
    a unique component_dir path pointing to their specific directory.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        component_dir: Path to the component's test directory
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        True if the component uses $component_dir for local file references
 | 
			
		||||
    """
 | 
			
		||||
    common_yaml = component_dir / "common.yaml"
 | 
			
		||||
    if not common_yaml.exists():
 | 
			
		||||
        return False
 | 
			
		||||
 | 
			
		||||
    try:
 | 
			
		||||
        content = common_yaml.read_text()
 | 
			
		||||
    except Exception:  # pylint: disable=broad-exception-caught
 | 
			
		||||
        return False
 | 
			
		||||
 | 
			
		||||
    # Pattern to match $component_dir or ${component_dir} references
 | 
			
		||||
    # These indicate local file usage that prevents grouping
 | 
			
		||||
    return bool(re.search(r"\$\{?component_dir\}?", content))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def is_platform_component(component_dir: Path) -> bool:
    """Tell whether the component backing this test dir is a platform component.

    Platform components declare ``IS_PLATFORM_COMPONENT = True`` in their
    ``__init__.py``. They are abstract bases (e.g. canbus, mcp23x08_base,
    mcp23x17_base) that define abstract methods and fail to link when
    compiled without a platform-specific implementation.

    Args:
        component_dir: Path to the component's test directory
            (tests/components/<name>).

    Returns:
        True when the component's source declares IS_PLATFORM_COMPONENT = True.
    """
    # The flag lives in the component source, not the tests:
    # tests/components/<name> -> three levels up is the repo root.
    repo_root = component_dir.parent.parent.parent
    init_file = (
        repo_root / "esphome" / "components" / component_dir.name / "__init__.py"
    )

    if not init_file.exists():
        return False

    try:
        return "IS_PLATFORM_COMPONENT = True" in init_file.read_text()
    except Exception:  # pylint: disable=broad-exception-caught
        return False
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _contains_extend_or_remove(data: Any) -> bool:
    """Recursively check whether a parsed YAML tree holds Extend/Remove nodes.

    Args:
        data: Parsed YAML data structure (dicts, lists, scalars).

    Returns:
        True if an Extend or Remove object appears anywhere in the tree.
    """
    if isinstance(data, (Extend, Remove)):
        return True

    if isinstance(data, dict):
        return any(_contains_extend_or_remove(value) for value in data.values())

    if isinstance(data, list):
        return any(_contains_extend_or_remove(item) for item in data)

    return False
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def analyze_yaml_file(yaml_file: Path) -> dict[str, Any]:
    """Load a YAML file once and extract everything needed for analysis.

    The file is parsed a single time; bus-package usage, direct bus
    definitions, and Extend/Remove markers are all derived from that one
    parse to avoid repeated file reads.

    Args:
        yaml_file: Path to the YAML file to analyze.

    Returns:
        Dictionary with keys:
        - buses: set of common bus package names used via packages:
        - has_extend_remove: True when Extend/Remove objects are present
        - has_direct_bus_config: True when buses are defined directly
          (not via packages)
        - loaded: True when the file existed and parsed successfully
    """
    result: dict[str, Any] = {
        "buses": set(),
        "has_extend_remove": False,
        "has_direct_bus_config": False,
        "loaded": False,
    }

    if not yaml_file.exists():
        return result

    try:
        data = yaml_util.load_yaml(yaml_file)
    except Exception:  # pylint: disable=broad-exception-caught
        return result
    result["loaded"] = True

    # Check for Extend/Remove objects
    result["has_extend_remove"] = _contains_extend_or_remove(data)

    # A bus section at the top level (i2c:, spi:, uart:, modbus:, ...) means
    # the component defines the bus itself; merging such components creates
    # unique, conflicting bus IDs.
    if isinstance(data, dict):
        result["has_direct_bus_config"] = any(
            bus_type in data for bus_type in DIRECT_BUS_TYPES
        )

    # Extract common bus packages (dict-format packages: only)
    packages = data.get("packages") if isinstance(data, dict) else None
    if not isinstance(packages, dict):
        return result

    valid_buses = get_common_bus_packages()
    for pkg_name in packages:
        if pkg_name not in valid_buses:
            continue
        result["buses"].add(pkg_name)
        # Pull in transitive bus dependencies (e.g. modbus implies uart).
        for dep in PACKAGE_DEPENDENCIES.get(pkg_name, ()):
            if dep in valid_buses:
                result["buses"].add(dep)

    return result
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def analyze_component(component_dir: Path) -> tuple[dict[str, list[str]], bool, bool]:
    """Analyze a component test directory to map platforms to bus usage.

    Every ``*.yaml`` file in the directory is inspected; ``test.<platform>.yaml``
    files additionally contribute a platform entry (possibly with an empty
    bus list) to the returned mapping.

    Args:
        component_dir: Path to the component's test directory.

    Returns:
        Tuple of:
        - Mapping of platform name to sorted list of bus configs,
          e.g. {"esp32-ard": ["i2c", "spi"], "esp32-idf": ["i2c"]}
        - True when any file uses !extend or !remove
        - True when any file defines buses directly (not via packages)
    """
    if not component_dir.is_dir():
        return {}, False, False

    platform_buses: dict[str, list[str]] = {}
    uses_extend_remove = False
    defines_bus_directly = False

    for yaml_file in component_dir.glob("*.yaml"):
        analysis = analyze_yaml_file(yaml_file)

        # Any single file with these traits marks the whole component.
        uses_extend_remove = uses_extend_remove or analysis["has_extend_remove"]
        defines_bus_directly = (
            defines_bus_directly or analysis["has_direct_bus_config"]
        )

        # Only test.<platform>.yaml files map to a platform entry.
        name = yaml_file.name
        if not (name.startswith("test.") and name.endswith(".yaml")):
            continue

        # test.esp32-ard.yaml -> esp32-ard. Platforms with no shared buses
        # still get an (empty) entry so bus-free components stay groupable.
        platform = yaml_file.stem.replace("test.", "")
        platform_buses[platform] = sorted(analysis["buses"])

    return platform_buses, uses_extend_remove, defines_bus_directly
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def analyze_all_components(
 | 
			
		||||
    tests_dir: Path = None,
 | 
			
		||||
) -> tuple[dict[str, dict[str, list[str]]], set[str], set[str]]:
 | 
			
		||||
    """Analyze all component test directories.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        tests_dir: Path to tests/components directory (defaults to auto-detect)
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        Tuple of:
 | 
			
		||||
        - Dictionary mapping component name to platform->buses mapping
 | 
			
		||||
        - Set of component names that cannot be grouped
 | 
			
		||||
        - Set of component names that define buses directly (need migration warning)
 | 
			
		||||
    """
 | 
			
		||||
    if tests_dir is None:
 | 
			
		||||
        tests_dir = Path("tests/components")
 | 
			
		||||
 | 
			
		||||
    if not tests_dir.exists():
 | 
			
		||||
        print(f"Error: {tests_dir} does not exist", file=sys.stderr)
 | 
			
		||||
        return {}, set(), set()
 | 
			
		||||
 | 
			
		||||
    components = {}
 | 
			
		||||
    non_groupable = set()
 | 
			
		||||
    direct_bus_components = set()
 | 
			
		||||
 | 
			
		||||
    for component_dir in sorted(tests_dir.iterdir()):
 | 
			
		||||
        if not component_dir.is_dir():
 | 
			
		||||
            continue
 | 
			
		||||
 | 
			
		||||
        component_name = component_dir.name
 | 
			
		||||
        platform_buses, has_extend_remove, has_direct_bus_config = analyze_component(
 | 
			
		||||
            component_dir
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
        if platform_buses:
 | 
			
		||||
            components[component_name] = platform_buses
 | 
			
		||||
 | 
			
		||||
        # Note: Components using $component_dir are now groupable because the merge
 | 
			
		||||
        # script rewrites these to absolute paths with component-specific substitutions
 | 
			
		||||
 | 
			
		||||
        # Check if component is explicitly isolated
 | 
			
		||||
        # These have known issues that prevent grouping with other components
 | 
			
		||||
        if component_name in ISOLATED_COMPONENTS:
 | 
			
		||||
            non_groupable.add(component_name)
 | 
			
		||||
 | 
			
		||||
        # Check if component is a base bus component
 | 
			
		||||
        # These ARE the bus platform implementations and define buses directly for testing
 | 
			
		||||
        # They cannot be grouped with components that use bus packages (causes ID conflicts)
 | 
			
		||||
        if component_name in BASE_BUS_COMPONENTS:
 | 
			
		||||
            non_groupable.add(component_name)
 | 
			
		||||
 | 
			
		||||
        # Check if component uses !extend or !remove directives
 | 
			
		||||
        # These rely on specific config structure and cannot be merged with other components
 | 
			
		||||
        # The directives work within a component's own package hierarchy but break when
 | 
			
		||||
        # merging independent components together
 | 
			
		||||
        if has_extend_remove:
 | 
			
		||||
            non_groupable.add(component_name)
 | 
			
		||||
 | 
			
		||||
        # Check if component defines buses directly in test files
 | 
			
		||||
        # These create unique bus IDs and cause conflicts when merged
 | 
			
		||||
        # Exclude base bus components (i2c, spi, uart, etc.) since they ARE the platform
 | 
			
		||||
        if has_direct_bus_config and component_name not in BASE_BUS_COMPONENTS:
 | 
			
		||||
            non_groupable.add(component_name)
 | 
			
		||||
            direct_bus_components.add(component_name)
 | 
			
		||||
 | 
			
		||||
    return components, non_groupable, direct_bus_components
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def create_grouping_signature(
    platform_buses: dict[str, list[str]], platform: str
) -> str:
    """Build the bus signature used to group components for one platform.

    Components sharing a signature can be tested together; all valid bus
    types may share a group because --testing-mode bypasses runtime
    conflicts during config validation.

    Args:
        platform_buses: Mapping of platform to list of buses.
        platform: The specific platform to create the signature for.

    Returns:
        Signature string such as "i2c" or "uart" (multiple buses joined
        with "+"), or "" when no valid common buses are used.
    """
    buses = platform_buses.get(platform, [])
    if not buses:
        return ""

    # Only recognized common bus packages participate in the signature.
    common_buses = get_common_bus_packages()
    valid_buses = sorted(bus for bus in buses if bus in common_buses)
    return "+".join(valid_buses) if valid_buses else ""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def group_components_by_signature(
    components: dict[str, dict[str, list[str]]], platform: str
) -> dict[str, list[str]]:
    """Group components by their bus signature for a specific platform.

    Args:
        components: Component analysis results from analyze_all_components().
        platform: Platform to group for (e.g. "esp32-ard").

    Returns:
        Mapping of signature to list of component names, e.g.
        {"i2c+uart_19200": ["comp1", "comp2"], "spi": ["comp3"]}.
    """
    groups: dict[str, list[str]] = {}

    for component_name, platform_buses in components.items():
        # Skip components that have no test file for this platform.
        if platform not in platform_buses:
            continue

        signature = create_grouping_signature(platform_buses, platform)
        if signature:
            groups.setdefault(signature, []).append(component_name)

    return groups
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def main() -> None:
    """Main entry point: parse CLI options, analyze components, print results."""
    parser = argparse.ArgumentParser(
        description="Analyze component test files to detect common bus usage"
    )
    parser.add_argument(
        "--components",
        "-c",
        nargs="+",
        help="Specific components to analyze (default: all)",
    )
    parser.add_argument(
        "--platform",
        "-p",
        help="Show grouping for a specific platform",
    )
    parser.add_argument(
        "--json",
        action="store_true",
        help="Output as JSON",
    )
    parser.add_argument(
        "--group",
        action="store_true",
        help="Show component groupings by bus signature",
    )

    args = parser.parse_args()

    # Analyze components
    tests_dir = Path("tests/components")

    if args.components:
        # Analyze only the specified components, applying the same
        # non-groupable rules as analyze_all_components().
        components = {}
        non_groupable = set()
        direct_bus_components = set()
        for comp in args.components:
            comp_dir = tests_dir / comp
            platform_buses, has_extend_remove, has_direct_bus_config = (
                analyze_component(comp_dir)
            )
            if platform_buses:
                components[comp] = platform_buses
            # Note: Components using $component_dir are now groupable
            if comp in ISOLATED_COMPONENTS:
                non_groupable.add(comp)
            if comp in BASE_BUS_COMPONENTS:
                non_groupable.add(comp)
            # Consistency fix: analyze_all_components() marks !extend/!remove
            # users as non-groupable; the per-component path previously
            # ignored this flag entirely.
            if has_extend_remove:
                non_groupable.add(comp)
            if has_direct_bus_config and comp not in BASE_BUS_COMPONENTS:
                non_groupable.add(comp)
                direct_bus_components.add(comp)
    else:
        # Analyze all components
        components, non_groupable, direct_bus_components = analyze_all_components(
            tests_dir
        )

    # Output results
    if args.group and args.platform:
        # Show groupings for a specific platform
        groups = group_components_by_signature(components, args.platform)

        if args.json:
            print(json.dumps(groups, indent=2))
        else:
            print(f"Component groupings for {args.platform}:")
            print()
            for signature, comp_list in sorted(groups.items()):
                print(f"  {signature}:")
                for comp in sorted(comp_list):
                    print(f"    - {comp}")
                print()
    elif args.json:
        # JSON output
        print(json.dumps(components, indent=2))
    else:
        # Human-readable output
        for component, platform_buses in sorted(components.items()):
            non_groupable_marker = (
                " [NON-GROUPABLE]" if component in non_groupable else ""
            )
            print(f"{component}{non_groupable_marker}:")
            for platform, buses in sorted(platform_buses.items()):
                bus_str = ", ".join(buses)
                print(f"  {platform}: {bus_str}")
        print()
        print(f"Total components analyzed: {len(components)}")
        if non_groupable:
            # Label fix: components are non-groupable for several reasons
            # (isolated, base bus, !extend/!remove, direct bus config) -
            # local-file usage is NOT one of them anymore, so keep the
            # summary label generic.
            print(f"Non-groupable components: {len(non_groupable)}")
            for comp in sorted(non_groupable):
                print(f"  - {comp}")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == "__main__":
 | 
			
		||||
    main()
 | 
			
		||||
@@ -237,6 +237,16 @@ def main() -> None:
 | 
			
		||||
    result = subprocess.run(cmd, capture_output=True, text=True, check=True)
 | 
			
		||||
    changed_components = parse_list_components_output(result.stdout)
 | 
			
		||||
 | 
			
		||||
    # Filter to only components that have test files
 | 
			
		||||
    # Components without tests shouldn't generate CI test jobs
 | 
			
		||||
    tests_dir = Path(root_path) / "tests" / "components"
 | 
			
		||||
    changed_components_with_tests = [
 | 
			
		||||
        component
 | 
			
		||||
        for component in changed_components
 | 
			
		||||
        if (component_test_dir := tests_dir / component).exists()
 | 
			
		||||
        and any(component_test_dir.glob("test.*.yaml"))
 | 
			
		||||
    ]
 | 
			
		||||
 | 
			
		||||
    # Build output
 | 
			
		||||
    output: dict[str, Any] = {
 | 
			
		||||
        "integration_tests": run_integration,
 | 
			
		||||
@@ -244,7 +254,8 @@ def main() -> None:
 | 
			
		||||
        "clang_format": run_clang_format,
 | 
			
		||||
        "python_linters": run_python_linters,
 | 
			
		||||
        "changed_components": changed_components,
 | 
			
		||||
        "component_test_count": len(changed_components),
 | 
			
		||||
        "changed_components_with_tests": changed_components_with_tests,
 | 
			
		||||
        "component_test_count": len(changed_components_with_tests),
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    # Output as JSON
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										379
									
								
								script/merge_component_configs.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										379
									
								
								script/merge_component_configs.py
									
									
									
									
									
										Executable file
									
								
							@@ -0,0 +1,379 @@
 | 
			
		||||
#!/usr/bin/env python3
 | 
			
		||||
"""Merge multiple component test configurations into a single test file.
 | 
			
		||||
 | 
			
		||||
This script combines multiple component test files that use the same common bus
 | 
			
		||||
configurations into a single merged test file. This allows testing multiple
 | 
			
		||||
compatible components together, reducing CI build time.
 | 
			
		||||
 | 
			
		||||
The merger handles:
 | 
			
		||||
- Component-specific substitutions (prefixing to avoid conflicts)
 | 
			
		||||
- Multiple instances of component configurations
 | 
			
		||||
- Shared common bus packages (included only once)
 | 
			
		||||
- Platform-specific configurations
 | 
			
		||||
- Uses ESPHome's built-in merge_config for proper YAML merging
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import argparse
 | 
			
		||||
from pathlib import Path
 | 
			
		||||
import re
 | 
			
		||||
import sys
 | 
			
		||||
from typing import Any
 | 
			
		||||
 | 
			
		||||
# Add esphome to path so we can import from it
 | 
			
		||||
sys.path.insert(0, str(Path(__file__).parent.parent))
 | 
			
		||||
 | 
			
		||||
from esphome import yaml_util
 | 
			
		||||
from esphome.config_helpers import merge_config
 | 
			
		||||
from script.analyze_component_buses import PACKAGE_DEPENDENCIES, get_common_bus_packages
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def load_yaml_file(yaml_file: Path) -> dict:
    """Load a YAML file using ESPHome's YAML loader.

    Args:
        yaml_file: Path to the YAML file.

    Returns:
        Parsed YAML as dictionary.

    Raises:
        FileNotFoundError: If the file does not exist.
    """
    if not yaml_file.exists():
        raise FileNotFoundError(f"YAML file not found: {yaml_file}")
    return yaml_util.load_yaml(yaml_file)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def extract_packages_from_yaml(data: dict) -> dict[str, str]:
    """Extract COMMON BUS package includes from parsed YAML.

    Only packages from test_build_components/common/ are returned;
    component-specific packages are ignored.

    Args:
        data: Parsed YAML dictionary.

    Returns:
        Mapping of package name to the string form of its include value.
        Only common bus packages (i2c, spi, uart, etc.) appear; buses
        pulled in as dependencies (e.g. modbus -> uart) are recorded under
        a "_dep_<name>" key with a descriptive placeholder value.
    """
    if "packages" not in data:
        return {}

    packages_value = data["packages"]
    if not isinstance(packages_value, dict):
        # List-format packages never include common bus packages; those
        # always use the dict format.
        return {}

    # Common bus package names are cached by the analyzer module.
    common_bus_packages = get_common_bus_packages()
    packages: dict[str, str] = {}

    # Dictionary format: packages: {name: value}
    for name, value in packages_value.items():
        # Keep only common bus packages; skip component-specific ones.
        if name not in common_bus_packages:
            continue
        packages[name] = str(value)
        # Record transitive bus dependencies (e.g. modbus includes uart).
        for dep in PACKAGE_DEPENDENCIES.get(name, ()):
            if dep in common_bus_packages:
                packages[f"_dep_{dep}"] = f"(included via {name})"

    return packages
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def prefix_substitutions_in_dict(
    data: Any, prefix: str, exclude: set[str] | None = None
) -> Any:
    """Recursively prefix all substitution references in a data structure.

    Both ``$name`` and ``${name}`` forms are recognized; rewritten
    references always come out in the braced ``${prefix_name}`` form.

    Args:
        data: YAML data structure (dict, list, or scalar).
        prefix: Prefix to add to substitution names.
        exclude: Set of substitution names to exclude from prefixing.

    Returns:
        Data structure with prefixed substitution references.
    """
    excluded = exclude if exclude is not None else set()

    def rewrite(match: "re.Match") -> str:
        """Prefix one matched substitution reference, honoring exclusions."""
        name = match.group(1)
        if name in excluded:
            return match.group(0)
        # Always use braced format in the output for consistency.
        return f"${{{prefix}_{name}}}"

    if isinstance(data, str):
        # Matches ${substitution} as well as bare $substitution.
        return re.sub(r"\$\{?(\w+)\}?", rewrite, data)
    if isinstance(data, dict):
        return {
            key: prefix_substitutions_in_dict(value, prefix, excluded)
            for key, value in data.items()
        }
    if isinstance(data, list):
        return [
            prefix_substitutions_in_dict(item, prefix, excluded) for item in data
        ]
    return data
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def deduplicate_by_id(data: dict) -> dict:
    """Drop list items whose 'id' duplicates an earlier item's id.

    Only the first occurrence of each ID is kept. Identical duplicates are
    silently removed; differing duplicates keep the first one, and
    ESPHome's validation will catch it if that causes issues.

    Args:
        data: Parsed config dictionary.

    Returns:
        Config with deduplicated lists (nested dicts handled recursively).
    """
    if not isinstance(data, dict):
        return data

    result = {}
    for key, value in data.items():
        if isinstance(value, list):
            seen_ids = set()
            kept = []
            for item in value:
                if isinstance(item, dict) and "id" in item:
                    item_id = item["id"]
                    if item_id in seen_ids:
                        # Duplicate ID: keep first occurrence only.
                        continue
                    seen_ids.add(item_id)
                kept.append(item)
            result[key] = kept
        elif isinstance(value, dict):
            # Recurse into nested mappings.
            result[key] = deduplicate_by_id(value)
        else:
            result[key] = value

    return result
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def merge_component_configs(
    component_names: list[str],
    platform: str,
    tests_dir: Path,
    output_file: Path,
) -> None:
    """Merge multiple component test configs into a single file.

    Each component's `test.<platform>.yaml` is loaded, its component-specific
    packages are expanded inline, its substitutions are prefixed with the
    component name, and the results are merged into one config. Common bus
    packages (spi/i2c/uart, ...) are re-added once at the end, and the
    `esphome:` section is stripped (the wrapper file supplies it).

    Args:
        component_names: List of component names to merge
        platform: Platform to merge for (e.g., "esp32-ard")
        tests_dir: Path to tests/components directory
        output_file: Path to output merged config file

    Raises:
        ValueError: If no components are given, or two components declare
            different common bus packages.
        FileNotFoundError: If a component has no test file for `platform`.
    """
    if not component_names:
        raise ValueError("No components specified")

    # Track packages to ensure they're identical
    # None = no component processed yet; {} = components so far had none.
    all_packages = None

    # Start with empty config
    merged_config_data = {}

    # Process each component
    for comp_name in component_names:
        comp_dir = tests_dir / comp_name
        test_file = comp_dir / f"test.{platform}.yaml"

        if not test_file.exists():
            raise FileNotFoundError(f"Test file not found: {test_file}")

        # Load the component's test file
        # NOTE(review): assumes load_yaml_file returns a plain dict — confirm.
        comp_data = load_yaml_file(test_file)

        # Validate packages are compatible
        # Components with no packages (no_buses) can merge with any group
        comp_packages = extract_packages_from_yaml(comp_data)

        if all_packages is None:
            # First component - set the baseline
            all_packages = comp_packages
        elif not comp_packages:
            # This component has no packages (no_buses) - it can merge with any group
            pass
        elif not all_packages:
            # Previous components had no packages, but this one does - adopt these packages
            all_packages = comp_packages
        elif comp_packages != all_packages:
            # Both have packages but they differ - this is an error
            raise ValueError(
                f"Component {comp_name} has different packages than previous components. "
                f"Expected: {all_packages}, Got: {comp_packages}. "
                f"All components must use the same common bus configs to be merged."
            )

        # Handle $component_dir by replacing with absolute path
        # This allows components that use local file references to be grouped
        comp_abs_dir = str(comp_dir.absolute())

        # Save top-level substitutions BEFORE expanding packages
        # In ESPHome, top-level substitutions override package substitutions
        top_level_subs = (
            comp_data["substitutions"].copy()
            if "substitutions" in comp_data and comp_data["substitutions"] is not None
            else {}
        )

        # Expand packages - but we'll restore substitution priority after
        if "packages" in comp_data:
            packages_value = comp_data["packages"]

            if isinstance(packages_value, dict):
                # Dict format - check each package
                common_bus_packages = get_common_bus_packages()
                # list() snapshot: merge_config may rebind comp_data while
                # we iterate the original packages mapping.
                for pkg_name, pkg_value in list(packages_value.items()):
                    if pkg_name in common_bus_packages:
                        continue
                    if not isinstance(pkg_value, dict):
                        continue
                    # Component-specific package - expand its content into top level
                    comp_data = merge_config(comp_data, pkg_value)
            elif isinstance(packages_value, list):
                # List format - expand all package includes
                for pkg_value in packages_value:
                    if not isinstance(pkg_value, dict):
                        continue
                    comp_data = merge_config(comp_data, pkg_value)

            # Remove all packages (common will be re-added at the end)
            del comp_data["packages"]

        # Restore top-level substitution priority
        # Top-level substitutions override any from packages
        if "substitutions" not in comp_data or comp_data["substitutions"] is None:
            comp_data["substitutions"] = {}

        # Merge: package subs as base, top-level subs override
        comp_data["substitutions"].update(top_level_subs)

        # Now prefix the final merged substitutions
        comp_data["substitutions"] = {
            f"{comp_name}_{sub_name}": sub_value
            for sub_name, sub_value in comp_data["substitutions"].items()
        }

        # Add component_dir substitution with absolute path for this component
        comp_data["substitutions"][f"{comp_name}_component_dir"] = comp_abs_dir

        # Prefix substitution references throughout the config
        comp_data = prefix_substitutions_in_dict(comp_data, comp_name)

        # Use ESPHome's merge_config to merge this component into the result
        # merge_config handles list merging with ID-based deduplication automatically
        merged_config_data = merge_config(merged_config_data, comp_data)

    # Add packages back (only once, since they're identical)
    # IMPORTANT: Only re-add common bus packages (spi, i2c, uart, etc.)
    # Do NOT re-add component-specific packages as they contain unprefixed $component_dir refs
    if all_packages:
        # Re-read the first component's file: its in-memory copy was mutated
        # above (packages deleted), so reload to recover the originals.
        first_comp_data = load_yaml_file(
            tests_dir / component_names[0] / f"test.{platform}.yaml"
        )
        if "packages" in first_comp_data and isinstance(
            first_comp_data["packages"], dict
        ):
            # Filter to only include common bus packages
            # Only dict format can contain common bus packages
            common_bus_packages = get_common_bus_packages()
            filtered_packages = {
                name: value
                for name, value in first_comp_data["packages"].items()
                if name in common_bus_packages
            }
            if filtered_packages:
                merged_config_data["packages"] = filtered_packages

    # Deduplicate items with same ID (keeps first occurrence)
    merged_config_data = deduplicate_by_id(merged_config_data)

    # Remove esphome section since it will be provided by the wrapper file
    # The wrapper file includes this merged config via packages and provides
    # the proper esphome: section with name, platform, etc.
    if "esphome" in merged_config_data:
        del merged_config_data["esphome"]

    # Write merged config
    output_file.parent.mkdir(parents=True, exist_ok=True)
    yaml_content = yaml_util.dump(merged_config_data)
    output_file.write_text(yaml_content)

    print(f"Successfully merged {len(component_names)} components into {output_file}")
			
		||||
 | 
			
		||||
 | 
			
		||||
def main() -> None:
    """Command-line entry point: parse arguments and run the config merge."""
    arg_parser = argparse.ArgumentParser(
        description="Merge multiple component test configs into a single file"
    )
    # Declarative option table keeps the parser definition compact.
    option_specs = (
        (
            ("--components", "-c"),
            {
                "required": True,
                "help": "Comma-separated list of component names to merge",
            },
        ),
        (
            ("--platform", "-p"),
            {"required": True, "help": "Platform to merge for (e.g., esp32-ard)"},
        ),
        (
            ("--output", "-o"),
            {
                "required": True,
                "type": Path,
                "help": "Output file path for merged config",
            },
        ),
        (
            ("--tests-dir",),
            {
                "type": Path,
                "default": Path("tests/components"),
                "help": "Path to tests/components directory",
            },
        ),
    )
    for flags, options in option_specs:
        arg_parser.add_argument(*flags, **options)

    args = arg_parser.parse_args()

    # The component list arrives comma-separated; trim whitespace per name.
    names = [part.strip() for part in args.components.split(",")]

    try:
        merge_component_configs(
            component_names=names,
            platform=args.platform,
            tests_dir=args.tests_dir,
            output_file=args.output,
        )
    except Exception as err:
        # Report the failure (plus traceback for debugging) and exit non-zero.
        print(f"Error merging configs: {err}", file=sys.stderr)
        import traceback

        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    main()
 | 
			
		||||
							
								
								
									
										268
									
								
								script/split_components_for_ci.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										268
									
								
								script/split_components_for_ci.py
									
									
									
									
									
										Executable file
									
								
							@@ -0,0 +1,268 @@
 | 
			
		||||
#!/usr/bin/env python3
 | 
			
		||||
"""Split components into batches with intelligent grouping.
 | 
			
		||||
 | 
			
		||||
This script analyzes components to identify which ones share common bus configurations
 | 
			
		||||
and intelligently groups them into batches to maximize the efficiency of the
 | 
			
		||||
component grouping system in CI.
 | 
			
		||||
 | 
			
		||||
Components with the same bus signature are placed in the same batch whenever possible,
 | 
			
		||||
allowing the test_build_components.py script to merge them into single builds.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import argparse
 | 
			
		||||
from collections import defaultdict
 | 
			
		||||
import json
 | 
			
		||||
from pathlib import Path
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
# Add esphome to path
 | 
			
		||||
sys.path.insert(0, str(Path(__file__).parent.parent))
 | 
			
		||||
 | 
			
		||||
from script.analyze_component_buses import (
 | 
			
		||||
    ISOLATED_COMPONENTS,
 | 
			
		||||
    NO_BUSES_SIGNATURE,
 | 
			
		||||
    analyze_all_components,
 | 
			
		||||
    create_grouping_signature,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
# Weighting for batch creation
 | 
			
		||||
# Isolated components can't be grouped/merged, so they count as 10x
 | 
			
		||||
# Groupable components can be merged into single builds, so they count as 1x
 | 
			
		||||
ISOLATED_WEIGHT = 10
 | 
			
		||||
GROUPABLE_WEIGHT = 1
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def has_test_files(component_name: str, tests_dir: Path) -> bool:
    """Check if a component has test files.

    Args:
        component_name: Name of the component
        tests_dir: Path to tests/components directory

    Returns:
        True if the component has test.*.yaml files
    """
    comp_path = tests_dir / component_name
    # A missing path or a non-directory both mean "no tests".
    if not comp_path.is_dir():
        return False

    # At least one test.*.yaml file must exist.
    return next(comp_path.glob("test.*.yaml"), None) is not None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def create_intelligent_batches(
    components: list[str],
    tests_dir: Path,
    batch_size: int = 40,
) -> list[list[str]]:
    """Create batches optimized for component grouping.

    Components sharing a bus signature are kept in the same batch so the
    downstream merge step can combine them into single builds. Isolated
    components get unique signatures and a 10x weight (ISOLATED_WEIGHT) so
    they spread across more batches; groupable components weigh 1x
    (GROUPABLE_WEIGHT). `batch_size` is a weighted target, not a raw count.

    Args:
        components: List of component names to batch
        tests_dir: Path to tests/components directory
        batch_size: Target size for each batch

    Returns:
        List of component batches (lists of component names)
    """
    # Filter out components without test files
    # Platform components like 'climate' and 'climate_ir' don't have test files
    components_with_tests = [
        comp for comp in components if has_test_files(comp, tests_dir)
    ]

    # Log filtered components to stderr for debugging
    if len(components_with_tests) < len(components):
        filtered_out = set(components) - set(components_with_tests)
        print(
            f"Note: Filtered {len(filtered_out)} components without test files: "
            f"{', '.join(sorted(filtered_out))}",
            file=sys.stderr,
        )

    # Analyze all components to get their bus signatures
    # NOTE(review): assumes analyze_all_components maps component ->
    # {platform: buses}; verify against script.analyze_component_buses.
    component_buses, non_groupable, _direct_bus_components = analyze_all_components(
        tests_dir
    )

    # Group components by their bus signature ONLY (ignore platform)
    # All platforms will be tested by test_build_components.py for each batch
    # Key: signature, Value: list of components
    signature_groups: dict[str, list[str]] = defaultdict(list)

    for component in components_with_tests:
        # Components that can't be grouped get unique signatures
        # This includes both manually curated ISOLATED_COMPONENTS and
        # automatically detected non_groupable components
        # These can share a batch/runner but won't be grouped/merged
        if component in ISOLATED_COMPONENTS or component in non_groupable:
            signature_groups[f"isolated_{component}"].append(component)
            continue

        # Get signature from any platform (they should all have the same buses)
        # Components not in component_buses were filtered out by has_test_files check
        comp_platforms = component_buses[component]
        for platform, buses in comp_platforms.items():
            if buses:
                signature = create_grouping_signature({platform: buses}, platform)
                # Group by signature only - platform doesn't matter for batching
                signature_groups[signature].append(component)
                break  # Only use first platform for grouping
        else:
            # No buses found for any platform - can be grouped together
            # (for-else: runs only when no platform had buses)
            signature_groups[NO_BUSES_SIGNATURE].append(component)

    # Create batches by keeping signature groups together
    # Components with the same signature stay in the same batches
    batches = []

    # Sort signature groups to prioritize groupable components
    # 1. Put "isolated_*" signatures last (can't be grouped with others)
    # 2. Sort groupable signatures by size (largest first)
    # 3. "no_buses" components CAN be grouped together
    def sort_key(item: tuple[str, list[str]]) -> tuple[bool, int]:
        # NOTE(review): `components` here shadows the outer parameter of the
        # same name — intentional-looking but worth renaming eventually.
        signature, components = item
        is_isolated = signature.startswith("isolated_")
        # Put "isolated_*" last (1), groupable first (0)
        # Within each category, sort by size (largest first)
        return (is_isolated, -len(components))

    sorted_groups = sorted(signature_groups.items(), key=sort_key)

    # Strategy: Create batches using weighted sizes
    # - Isolated components count as 10x (since they can't be grouped/merged)
    # - Groupable components count as 1x (can be merged into single builds)
    # - This distributes isolated components across more runners
    # - Ensures each runner has a good mix of groupable vs isolated components

    current_batch = []
    current_weight = 0

    for signature, group_components in sorted_groups:
        is_isolated = signature.startswith("isolated_")
        weight_per_component = ISOLATED_WEIGHT if is_isolated else GROUPABLE_WEIGHT

        for component in group_components:
            # Check if adding this component would exceed the batch size
            if current_weight + weight_per_component > batch_size and current_batch:
                # Start a new batch
                batches.append(current_batch)
                current_batch = []
                current_weight = 0

            # Add component to current batch
            current_batch.append(component)
            current_weight += weight_per_component

    # Don't forget the last batch
    if current_batch:
        batches.append(current_batch)

    return batches
 | 
			
		||||
 | 
			
		||||
def main() -> int:
    """CLI entry point: parse args, build batches, and emit them for CI.

    Returns:
        Process exit code (0 on success, 1 on bad input).
    """
    parser = argparse.ArgumentParser(
        description="Split components into intelligent batches for CI testing"
    )
    parser.add_argument(
        "--components",
        "-c",
        required=True,
        help="JSON array of component names",
    )
    parser.add_argument(
        "--batch-size",
        "-b",
        type=int,
        default=40,
        help="Target batch size (default: 40, weighted)",
    )
    parser.add_argument(
        "--tests-dir",
        type=Path,
        default=Path("tests/components"),
        help="Path to tests/components directory",
    )
    parser.add_argument(
        "--output",
        "-o",
        choices=["json", "github"],
        default="github",
        help="Output format (json or github for GitHub Actions)",
    )
    args = parser.parse_args()

    # Decode and validate the component list: must be a JSON array.
    try:
        requested = json.loads(args.components)
    except json.JSONDecodeError as err:
        print(f"Error parsing components JSON: {err}", file=sys.stderr)
        return 1
    if not isinstance(requested, list):
        print("Components must be a JSON array", file=sys.stderr)
        return 1

    batches = create_intelligent_batches(
        components=requested,
        tests_dir=args.tests_dir,
        batch_size=args.batch_size,
    )

    # CI consumes each batch as a single space-separated string.
    joined_batches = [" ".join(batch) for batch in batches]
    payload = json.dumps(joined_batches)
    if args.output == "json":
        # Plain JSON array on stdout.
        print(payload)
    else:
        # GitHub Actions output-variable format.
        print(f"components={payload}")

    # Summary goes to stderr so it shows in CI logs without polluting stdout.
    batched = [comp for batch in batches for comp in batch]
    actual_components = len(batched)

    # Re-analyze to classify batched components as isolated vs groupable.
    _, non_groupable, _ = analyze_all_components(args.tests_dir)
    isolated_count = len(
        [c for c in batched if c in ISOLATED_COMPONENTS or c in non_groupable]
    )
    groupable_count = actual_components - isolated_count

    print("\n=== Intelligent Batch Summary ===", file=sys.stderr)
    print(f"Total components requested: {len(requested)}", file=sys.stderr)
    print(f"Components with test files: {actual_components}", file=sys.stderr)
    print(f"  - Groupable (weight=1): {groupable_count}", file=sys.stderr)
    print(f"  - Isolated (weight=10): {isolated_count}", file=sys.stderr)
    if actual_components < len(requested):
        print(
            f"Components skipped (no test files): {len(requested) - actual_components}",
            file=sys.stderr,
        )
    print(f"Number of batches: {len(batches)}", file=sys.stderr)
    print(f"Batch size target (weighted): {args.batch_size}", file=sys.stderr)
    if len(batches) > 0:
        print(
            f"Average components per batch: {actual_components / len(batches):.1f}",
            file=sys.stderr,
        )
    print(file=sys.stderr)

    return 0


if __name__ == "__main__":
    sys.exit(main())
 | 
			
		||||
@@ -1,106 +0,0 @@
 | 
			
		||||
#!/usr/bin/env bash
# Build/validate ESPHome component test configs.
# For every tests/components/<component>/<test_name>.<platform>.yaml file,
# generates a wrapper YAML under tests/test_build_components/build/ and runs
# `esphome <command>` on it, once per matching base platform file.

set -e

# Print usage to stderr and exit with failure.
help() {
  echo "Usage: $0 [-e <config|compile|clean>] [-c <string>] [-t <string>]" 1>&2
  echo 1>&2
  echo "  - e - Parameter for esphome command. Default compile. Common alternative is config." 1>&2
  echo "  - c - Component folder name to test. Default *. E.g. '-c logger'." 1>&2
  echo "  - t - Target name to test. Put '-t list' to display all possibilities. E.g. '-t esp32-s2-idf-51'." 1>&2
  exit 1
}

# Parse parameter:
# - `e` - Parameter for `esphome` command. Default `compile`. Common alternative is `config`.
# - `c` - Component folder name to test. Default `*`.
# - `t` - Optional target platform filter (checked inside start_esphome).
esphome_command="compile"
target_component="*"
while getopts e:c:t: flag
do
    case $flag in
        e) esphome_command=${OPTARG};;
        c) target_component=${OPTARG};;
        t) requested_target_platform=${OPTARG};;
        \?) help;;
    esac
done

# Run from the repository root regardless of invocation directory.
cd "$(dirname "$0")/.."

if ! [ -d "./tests/test_build_components/build" ]; then
  mkdir ./tests/test_build_components/build
fi

# Build one test config and invoke esphome on it.
# Reads globals set by the loop below: $f, $test_name, $target_component,
# $target_platform, $target_platform_file, $target_platform_with_version.
start_esphome() {
  if [ -n "$requested_target_platform" ] && [ "$requested_target_platform" != "$target_platform_with_version" ]; then
    echo "Skipping $target_platform_with_version"
    return
  fi
  # create dynamic yaml file in `build` folder.
  # `./tests/test_build_components/build/[target_component].[test_name].[target_platform_with_version].yaml`
  component_test_file="./tests/test_build_components/build/$target_component.$test_name.$target_platform_with_version.yaml"

  # NOTE(review): unquoted $target_platform_file / $component_test_file rely
  # on paths never containing spaces — confirm before reuse.
  cp $target_platform_file $component_test_file
  if [[ "$OSTYPE" == "darwin"* ]]; then
    # macOS sed is...different
    sed -i '' "s!\$component_test_file!../../.$f!g" $component_test_file
  else
    sed -i "s!\$component_test_file!../../.$f!g" $component_test_file
  fi

  # Start esphome process
  echo "> [$target_component] [$test_name] [$target_platform_with_version]"
  set -x
  # TODO: Validate escape of Command line substitution value
  python3 -m esphome -s component_name $target_component -s component_dir ../../components/$target_component -s test_name $test_name -s target_platform $target_platform $esphome_command $component_test_file
  { set +x; } 2>/dev/null
}

# Find all test yaml files.
# - `./tests/components/[target_component]/[test_name].[target_platform].yaml`
# - `./tests/components/[target_component]/[test_name].all.yaml`
for f in ./tests/components/$target_component/*.*.yaml; do
  [ -f "$f" ] || continue
  # Split the path on '/'; index 3 is the component directory name.
  IFS='/' read -r -a folder_name <<< "$f"
  target_component="${folder_name[3]}"

  # Split the filename on '.'; [0]=test name, [1]=platform (or missing).
  IFS='.' read -r -a file_name <<< "${folder_name[4]}"
  test_name="${file_name[0]}"
  target_platform="${file_name[1]}"
  file_name_parts=${#file_name[@]}

  if [ "$target_platform" = "all" ] || [ $file_name_parts = 2 ]; then
    # Test has *not* defined a specific target platform. Need to run tests for all possible target platforms.

    for target_platform_file in ./tests/test_build_components/build_components_base.*.yaml; do
      IFS='/' read -r -a folder_name <<< "$target_platform_file"
      IFS='.' read -r -a file_name <<< "${folder_name[3]}"
      target_platform="${file_name[1]}"

      start_esphome
    done

  else
    # Test has defined a specific target platform.

    # Validate we have a base test yaml for selected platform.
    # The target_platform is sourced from the following location.
    # 1. `./tests/test_build_components/build_components_base.[target_platform].yaml`
    # 2. `./tests/test_build_components/build_components_base.[target_platform]-ard.yaml`
    target_platform_file="./tests/test_build_components/build_components_base.$target_platform.yaml"
    if ! [ -f "$target_platform_file" ]; then
      echo "No base test file [./tests/test_build_components/build_components_base.$target_platform.yaml] for component test [$f] found."
      exit 1
    fi

    for target_platform_file in ./tests/test_build_components/build_components_base.$target_platform*.yaml; do
      # trim off "./tests/test_build_components/build_components_base." prefix
      # (52 = length of that literal prefix; brittle if the path changes)
      target_platform_with_version=${target_platform_file:52}
      # ...now remove suffix starting with "." leaving just the test target hardware and software platform (possibly with version)
      # For example: "esp32-s3-idf-50"
      target_platform_with_version=${target_platform_with_version%.*}
      start_esphome
    done
  fi
done
 | 
			
		||||
							
								
								
									
										1
									
								
								script/test_build_components
									
									
									
									
									
										Symbolic link
									
								
							
							
						
						
									
										1
									
								
								script/test_build_components
									
									
									
									
									
										Symbolic link
									
								
							@@ -0,0 +1 @@
 | 
			
		||||
test_build_components.py
 | 
			
		||||
							
								
								
									
										931
									
								
								script/test_build_components.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										931
									
								
								script/test_build_components.py
									
									
									
									
									
										Executable file
									
								
							@@ -0,0 +1,931 @@
 | 
			
		||||
#!/usr/bin/env python3
 | 
			
		||||
"""Test ESPHome component builds with intelligent grouping.
 | 
			
		||||
 | 
			
		||||
This script replaces the bash test_build_components script with Python,
 | 
			
		||||
adding support for intelligent component grouping based on shared bus
 | 
			
		||||
configurations to reduce CI build time.
 | 
			
		||||
 | 
			
		||||
Features:
 | 
			
		||||
- Analyzes components for shared common bus configs
 | 
			
		||||
- Groups compatible components together
 | 
			
		||||
- Merges configs for grouped components
 | 
			
		||||
- Uses --testing-mode for grouped tests
 | 
			
		||||
- Maintains backward compatibility with single component testing
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import argparse
 | 
			
		||||
from collections import defaultdict
 | 
			
		||||
import hashlib
 | 
			
		||||
import os
 | 
			
		||||
from pathlib import Path
 | 
			
		||||
import subprocess
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
# Add esphome to path
 | 
			
		||||
sys.path.insert(0, str(Path(__file__).parent.parent))
 | 
			
		||||
 | 
			
		||||
# pylint: disable=wrong-import-position
 | 
			
		||||
from script.analyze_component_buses import (
 | 
			
		||||
    BASE_BUS_COMPONENTS,
 | 
			
		||||
    ISOLATED_COMPONENTS,
 | 
			
		||||
    NO_BUSES_SIGNATURE,
 | 
			
		||||
    analyze_all_components,
 | 
			
		||||
    create_grouping_signature,
 | 
			
		||||
    is_platform_component,
 | 
			
		||||
    uses_local_file_references,
 | 
			
		||||
)
 | 
			
		||||
from script.merge_component_configs import merge_component_configs
 | 
			
		||||
 | 
			
		||||
# Platform-specific maximum group sizes
 | 
			
		||||
# ESP8266 has limited IRAM and can't handle large component groups
 | 
			
		||||
PLATFORM_MAX_GROUP_SIZE = {
 | 
			
		||||
    "esp8266-ard": 10,  # ESP8266 Arduino has limited IRAM
 | 
			
		||||
    "esp8266-idf": 10,  # ESP8266 IDF also has limited IRAM
 | 
			
		||||
    # BK72xx now uses BK7252 board (1.62MB flash vs 1.03MB) - no limit needed
 | 
			
		||||
    # Other platforms can handle larger groups
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def show_disk_space_if_ci(esphome_command: str) -> None:
 | 
			
		||||
    """Show disk space usage if running in CI during compile.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        esphome_command: The esphome command being run (config/compile/clean)
 | 
			
		||||
    """
 | 
			
		||||
    if os.environ.get("GITHUB_ACTIONS") and esphome_command == "compile":
 | 
			
		||||
        print("\n" + "=" * 80)
 | 
			
		||||
        print("Disk Space After Build:")
 | 
			
		||||
        print("=" * 80)
 | 
			
		||||
        subprocess.run(["df", "-h"], check=False)
 | 
			
		||||
        print("=" * 80 + "\n")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def find_component_tests(
 | 
			
		||||
    components_dir: Path, component_pattern: str = "*"
 | 
			
		||||
) -> dict[str, list[Path]]:
 | 
			
		||||
    """Find all component test files.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        components_dir: Path to tests/components directory
 | 
			
		||||
        component_pattern: Glob pattern for component names
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        Dictionary mapping component name to list of test files
 | 
			
		||||
    """
 | 
			
		||||
    component_tests = defaultdict(list)
 | 
			
		||||
 | 
			
		||||
    for comp_dir in components_dir.glob(component_pattern):
 | 
			
		||||
        if not comp_dir.is_dir():
 | 
			
		||||
            continue
 | 
			
		||||
 | 
			
		||||
        for test_file in comp_dir.glob("test.*.yaml"):
 | 
			
		||||
            component_tests[comp_dir.name].append(test_file)
 | 
			
		||||
 | 
			
		||||
    return dict(component_tests)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def parse_test_filename(test_file: Path) -> tuple[str, str]:
 | 
			
		||||
    """Parse test filename to extract test name and platform.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        test_file: Path to test file
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        Tuple of (test_name, platform)
 | 
			
		||||
    """
 | 
			
		||||
    parts = test_file.stem.split(".")
 | 
			
		||||
    if len(parts) == 2:
 | 
			
		||||
        return parts[0], parts[1]  # test, platform
 | 
			
		||||
    return parts[0], "all"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def get_platform_base_files(base_dir: Path) -> dict[str, list[Path]]:
 | 
			
		||||
    """Get all platform base files.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        base_dir: Path to test_build_components directory
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        Dictionary mapping platform to list of base files (for version variants)
 | 
			
		||||
    """
 | 
			
		||||
    platform_files = defaultdict(list)
 | 
			
		||||
 | 
			
		||||
    for base_file in base_dir.glob("build_components_base.*.yaml"):
 | 
			
		||||
        # Extract platform from filename
 | 
			
		||||
        # e.g., build_components_base.esp32-idf.yaml -> esp32-idf
 | 
			
		||||
        # or build_components_base.esp32-idf-50.yaml -> esp32-idf
 | 
			
		||||
        filename = base_file.stem
 | 
			
		||||
        parts = filename.replace("build_components_base.", "").split("-")
 | 
			
		||||
 | 
			
		||||
        # Platform is everything before version number (if present)
 | 
			
		||||
        # Check if last part is a number (version)
 | 
			
		||||
        platform = "-".join(parts[:-1]) if parts[-1].isdigit() else "-".join(parts)
 | 
			
		||||
 | 
			
		||||
        platform_files[platform].append(base_file)
 | 
			
		||||
 | 
			
		||||
    return dict(platform_files)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def extract_platform_with_version(base_file: Path) -> str:
 | 
			
		||||
    """Extract platform with version from base filename.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        base_file: Path to base file
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        Platform with version (e.g., "esp32-idf-50" or "esp32-idf")
 | 
			
		||||
    """
 | 
			
		||||
    # Remove "build_components_base." prefix and ".yaml" suffix
 | 
			
		||||
    return base_file.stem.replace("build_components_base.", "")
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def run_esphome_test(
 | 
			
		||||
    component: str,
 | 
			
		||||
    test_file: Path,
 | 
			
		||||
    platform: str,
 | 
			
		||||
    platform_with_version: str,
 | 
			
		||||
    base_file: Path,
 | 
			
		||||
    build_dir: Path,
 | 
			
		||||
    esphome_command: str,
 | 
			
		||||
    continue_on_fail: bool,
 | 
			
		||||
    use_testing_mode: bool = False,
 | 
			
		||||
) -> tuple[bool, str]:
 | 
			
		||||
    """Run esphome test for a single component.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        component: Component name
 | 
			
		||||
        test_file: Path to component test file
 | 
			
		||||
        platform: Platform name (e.g., "esp32-idf")
 | 
			
		||||
        platform_with_version: Platform with version (e.g., "esp32-idf-50")
 | 
			
		||||
        base_file: Path to platform base file
 | 
			
		||||
        build_dir: Path to build directory
 | 
			
		||||
        esphome_command: ESPHome command (config/compile)
 | 
			
		||||
        continue_on_fail: Whether to continue on failure
 | 
			
		||||
        use_testing_mode: Whether to use --testing-mode flag
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        Tuple of (success status, command string)
 | 
			
		||||
    """
 | 
			
		||||
    test_name = test_file.stem.split(".")[0]
 | 
			
		||||
 | 
			
		||||
    # Create dynamic test file in build directory
 | 
			
		||||
    output_file = build_dir / f"{component}.{test_name}.{platform_with_version}.yaml"
 | 
			
		||||
 | 
			
		||||
    # Copy base file and substitute component test file reference
 | 
			
		||||
    base_content = base_file.read_text()
 | 
			
		||||
    # Get relative path from build dir to test file
 | 
			
		||||
    repo_root = Path(__file__).parent.parent
 | 
			
		||||
    component_test_ref = f"../../{test_file.relative_to(repo_root / 'tests')}"
 | 
			
		||||
    output_content = base_content.replace("$component_test_file", component_test_ref)
 | 
			
		||||
    output_file.write_text(output_content)
 | 
			
		||||
 | 
			
		||||
    # Build esphome command
 | 
			
		||||
    cmd = [
 | 
			
		||||
        sys.executable,
 | 
			
		||||
        "-m",
 | 
			
		||||
        "esphome",
 | 
			
		||||
    ]
 | 
			
		||||
 | 
			
		||||
    # Add --testing-mode if needed (must be before subcommand)
 | 
			
		||||
    if use_testing_mode:
 | 
			
		||||
        cmd.append("--testing-mode")
 | 
			
		||||
 | 
			
		||||
    # Add substitutions
 | 
			
		||||
    cmd.extend(
 | 
			
		||||
        [
 | 
			
		||||
            "-s",
 | 
			
		||||
            "component_name",
 | 
			
		||||
            component,
 | 
			
		||||
            "-s",
 | 
			
		||||
            "component_dir",
 | 
			
		||||
            f"../../components/{component}",
 | 
			
		||||
            "-s",
 | 
			
		||||
            "test_name",
 | 
			
		||||
            test_name,
 | 
			
		||||
            "-s",
 | 
			
		||||
            "target_platform",
 | 
			
		||||
            platform,
 | 
			
		||||
        ]
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    # Add command and config file
 | 
			
		||||
    cmd.extend([esphome_command, str(output_file)])
 | 
			
		||||
 | 
			
		||||
    # Build command string for display/logging
 | 
			
		||||
    cmd_str = " ".join(cmd)
 | 
			
		||||
 | 
			
		||||
    # Run command
 | 
			
		||||
    print(f"> [{component}] [{test_name}] [{platform_with_version}]")
 | 
			
		||||
    if use_testing_mode:
 | 
			
		||||
        print("  (using --testing-mode)")
 | 
			
		||||
 | 
			
		||||
    try:
 | 
			
		||||
        result = subprocess.run(cmd, check=False)
 | 
			
		||||
        success = result.returncode == 0
 | 
			
		||||
 | 
			
		||||
        # Show disk space after build in CI during compile
 | 
			
		||||
        show_disk_space_if_ci(esphome_command)
 | 
			
		||||
 | 
			
		||||
        if not success and not continue_on_fail:
 | 
			
		||||
            # Print command immediately for failed tests
 | 
			
		||||
            print(f"\n{'=' * 80}")
 | 
			
		||||
            print("FAILED - Command to reproduce:")
 | 
			
		||||
            print(f"{'=' * 80}")
 | 
			
		||||
            print(cmd_str)
 | 
			
		||||
            print()
 | 
			
		||||
            raise subprocess.CalledProcessError(result.returncode, cmd)
 | 
			
		||||
        return success, cmd_str
 | 
			
		||||
    except subprocess.CalledProcessError:
 | 
			
		||||
        # Re-raise if we're not continuing on fail
 | 
			
		||||
        if not continue_on_fail:
 | 
			
		||||
            raise
 | 
			
		||||
        return False, cmd_str
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def run_grouped_test(
 | 
			
		||||
    components: list[str],
 | 
			
		||||
    platform: str,
 | 
			
		||||
    platform_with_version: str,
 | 
			
		||||
    base_file: Path,
 | 
			
		||||
    build_dir: Path,
 | 
			
		||||
    tests_dir: Path,
 | 
			
		||||
    esphome_command: str,
 | 
			
		||||
    continue_on_fail: bool,
 | 
			
		||||
) -> tuple[bool, str]:
 | 
			
		||||
    """Run esphome test for a group of components with shared bus configs.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        components: List of component names to test together
 | 
			
		||||
        platform: Platform name (e.g., "esp32-idf")
 | 
			
		||||
        platform_with_version: Platform with version (e.g., "esp32-idf-50")
 | 
			
		||||
        base_file: Path to platform base file
 | 
			
		||||
        build_dir: Path to build directory
 | 
			
		||||
        tests_dir: Path to tests/components directory
 | 
			
		||||
        esphome_command: ESPHome command (config/compile)
 | 
			
		||||
        continue_on_fail: Whether to continue on failure
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        Tuple of (success status, command string)
 | 
			
		||||
    """
 | 
			
		||||
    # Create merged config
 | 
			
		||||
    group_name = "_".join(components[:3])  # Use first 3 components for name
 | 
			
		||||
    if len(components) > 3:
 | 
			
		||||
        group_name += f"_plus_{len(components) - 3}"
 | 
			
		||||
 | 
			
		||||
    # Create unique device name by hashing sorted component list + platform
 | 
			
		||||
    # This prevents conflicts when different component groups are tested
 | 
			
		||||
    sorted_components = sorted(components)
 | 
			
		||||
    hash_input = "_".join(sorted_components) + "_" + platform
 | 
			
		||||
    group_hash = hashlib.md5(hash_input.encode()).hexdigest()[:8]
 | 
			
		||||
    device_name = f"comptest{platform.replace('-', '')}{group_hash}"
 | 
			
		||||
 | 
			
		||||
    merged_config_file = build_dir / f"merged_{group_name}.{platform_with_version}.yaml"
 | 
			
		||||
 | 
			
		||||
    try:
 | 
			
		||||
        merge_component_configs(
 | 
			
		||||
            component_names=components,
 | 
			
		||||
            platform=platform_with_version,
 | 
			
		||||
            tests_dir=tests_dir,
 | 
			
		||||
            output_file=merged_config_file,
 | 
			
		||||
        )
 | 
			
		||||
    except Exception as e:  # pylint: disable=broad-exception-caught
 | 
			
		||||
        print(f"Error merging configs for {components}: {e}")
 | 
			
		||||
        if not continue_on_fail:
 | 
			
		||||
            raise
 | 
			
		||||
        # Return empty command string since we failed before building the command
 | 
			
		||||
        return False, f"# Failed during config merge: {e}"
 | 
			
		||||
 | 
			
		||||
    # Create test file that includes merged config
 | 
			
		||||
    output_file = build_dir / f"test_{group_name}.{platform_with_version}.yaml"
 | 
			
		||||
    base_content = base_file.read_text()
 | 
			
		||||
    merged_ref = merged_config_file.name
 | 
			
		||||
    output_content = base_content.replace("$component_test_file", merged_ref)
 | 
			
		||||
    output_file.write_text(output_content)
 | 
			
		||||
 | 
			
		||||
    # Build esphome command with --testing-mode
 | 
			
		||||
    cmd = [
 | 
			
		||||
        sys.executable,
 | 
			
		||||
        "-m",
 | 
			
		||||
        "esphome",
 | 
			
		||||
        "--testing-mode",  # Required for grouped tests
 | 
			
		||||
        "-s",
 | 
			
		||||
        "component_name",
 | 
			
		||||
        device_name,  # Use unique hash-based device name
 | 
			
		||||
        "-s",
 | 
			
		||||
        "component_dir",
 | 
			
		||||
        "../../components",
 | 
			
		||||
        "-s",
 | 
			
		||||
        "test_name",
 | 
			
		||||
        "merged",
 | 
			
		||||
        "-s",
 | 
			
		||||
        "target_platform",
 | 
			
		||||
        platform,
 | 
			
		||||
        esphome_command,
 | 
			
		||||
        str(output_file),
 | 
			
		||||
    ]
 | 
			
		||||
 | 
			
		||||
    # Build command string for display/logging
 | 
			
		||||
    cmd_str = " ".join(cmd)
 | 
			
		||||
 | 
			
		||||
    # Run command
 | 
			
		||||
    components_str = ", ".join(components)
 | 
			
		||||
    print(f"> [GROUPED: {components_str}] [{platform_with_version}]")
 | 
			
		||||
    print("  (using --testing-mode)")
 | 
			
		||||
 | 
			
		||||
    try:
 | 
			
		||||
        result = subprocess.run(cmd, check=False)
 | 
			
		||||
        success = result.returncode == 0
 | 
			
		||||
 | 
			
		||||
        # Show disk space after build in CI during compile
 | 
			
		||||
        show_disk_space_if_ci(esphome_command)
 | 
			
		||||
 | 
			
		||||
        if not success and not continue_on_fail:
 | 
			
		||||
            # Print command immediately for failed tests
 | 
			
		||||
            print(f"\n{'=' * 80}")
 | 
			
		||||
            print("FAILED - Command to reproduce:")
 | 
			
		||||
            print(f"{'=' * 80}")
 | 
			
		||||
            print(cmd_str)
 | 
			
		||||
            print()
 | 
			
		||||
            raise subprocess.CalledProcessError(result.returncode, cmd)
 | 
			
		||||
        return success, cmd_str
 | 
			
		||||
    except subprocess.CalledProcessError:
 | 
			
		||||
        # Re-raise if we're not continuing on fail
 | 
			
		||||
        if not continue_on_fail:
 | 
			
		||||
            raise
 | 
			
		||||
        return False, cmd_str
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def run_grouped_component_tests(
 | 
			
		||||
    all_tests: dict[str, list[Path]],
 | 
			
		||||
    platform_filter: str | None,
 | 
			
		||||
    platform_bases: dict[str, list[Path]],
 | 
			
		||||
    tests_dir: Path,
 | 
			
		||||
    build_dir: Path,
 | 
			
		||||
    esphome_command: str,
 | 
			
		||||
    continue_on_fail: bool,
 | 
			
		||||
) -> tuple[set[tuple[str, str]], list[str], list[str], dict[str, str]]:
 | 
			
		||||
    """Run grouped component tests.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        all_tests: Dictionary mapping component names to test files
 | 
			
		||||
        platform_filter: Optional platform to filter by
 | 
			
		||||
        platform_bases: Platform base files mapping
 | 
			
		||||
        tests_dir: Path to tests/components directory
 | 
			
		||||
        build_dir: Path to build directory
 | 
			
		||||
        esphome_command: ESPHome command (config/compile)
 | 
			
		||||
        continue_on_fail: Whether to continue on failure
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        Tuple of (tested_components, passed_tests, failed_tests, failed_commands)
 | 
			
		||||
    """
 | 
			
		||||
    tested_components = set()
 | 
			
		||||
    passed_tests = []
 | 
			
		||||
    failed_tests = []
 | 
			
		||||
    failed_commands = {}  # Map test_id to command string
 | 
			
		||||
 | 
			
		||||
    # Group components by platform and bus signature
 | 
			
		||||
    grouped_components: dict[tuple[str, str], list[str]] = defaultdict(list)
 | 
			
		||||
    print("\n" + "=" * 80)
 | 
			
		||||
    print("Analyzing components for intelligent grouping...")
 | 
			
		||||
    print("=" * 80)
 | 
			
		||||
    component_buses, non_groupable, direct_bus_components = analyze_all_components(
 | 
			
		||||
        tests_dir
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    # Track why components can't be grouped (for detailed output)
 | 
			
		||||
    non_groupable_reasons = {}
 | 
			
		||||
 | 
			
		||||
    # Group by (platform, bus_signature)
 | 
			
		||||
    for component, platforms in component_buses.items():
 | 
			
		||||
        if component not in all_tests:
 | 
			
		||||
            continue
 | 
			
		||||
 | 
			
		||||
        # Skip components that must be tested in isolation
 | 
			
		||||
        # These are shown separately and should not be in non_groupable_reasons
 | 
			
		||||
        if component in ISOLATED_COMPONENTS:
 | 
			
		||||
            continue
 | 
			
		||||
 | 
			
		||||
        # Skip base bus components (these test the bus platforms themselves)
 | 
			
		||||
        if component in BASE_BUS_COMPONENTS:
 | 
			
		||||
            continue
 | 
			
		||||
 | 
			
		||||
        # Skip components that use local file references or direct bus configs
 | 
			
		||||
        if component in non_groupable:
 | 
			
		||||
            # Track the reason (using pre-calculated results to avoid expensive re-analysis)
 | 
			
		||||
            if component not in non_groupable_reasons:
 | 
			
		||||
                if component in direct_bus_components:
 | 
			
		||||
                    non_groupable_reasons[component] = (
 | 
			
		||||
                        "Defines buses directly (not via packages) - NEEDS MIGRATION"
 | 
			
		||||
                    )
 | 
			
		||||
                elif uses_local_file_references(tests_dir / component):
 | 
			
		||||
                    non_groupable_reasons[component] = (
 | 
			
		||||
                        "Uses local file references ($component_dir)"
 | 
			
		||||
                    )
 | 
			
		||||
                elif is_platform_component(tests_dir / component):
 | 
			
		||||
                    non_groupable_reasons[component] = (
 | 
			
		||||
                        "Platform component (abstract base class)"
 | 
			
		||||
                    )
 | 
			
		||||
                else:
 | 
			
		||||
                    non_groupable_reasons[component] = (
 | 
			
		||||
                        "Uses !extend or !remove directives"
 | 
			
		||||
                    )
 | 
			
		||||
            continue
 | 
			
		||||
 | 
			
		||||
        for platform, buses in platforms.items():
 | 
			
		||||
            # Skip if platform doesn't match filter
 | 
			
		||||
            if platform_filter and not platform.startswith(platform_filter):
 | 
			
		||||
                continue
 | 
			
		||||
 | 
			
		||||
            # Create signature for this component's bus configuration
 | 
			
		||||
            # Components with no buses get NO_BUSES_SIGNATURE so they can be grouped together
 | 
			
		||||
            if buses:
 | 
			
		||||
                signature = create_grouping_signature({platform: buses}, platform)
 | 
			
		||||
            else:
 | 
			
		||||
                signature = NO_BUSES_SIGNATURE
 | 
			
		||||
 | 
			
		||||
            # Add to grouped components (including those with no buses)
 | 
			
		||||
            if signature:
 | 
			
		||||
                grouped_components[(platform, signature)].append(component)
 | 
			
		||||
 | 
			
		||||
    # Print detailed grouping plan
 | 
			
		||||
    print("\nGrouping Plan:")
 | 
			
		||||
    print("-" * 80)
 | 
			
		||||
 | 
			
		||||
    # Show isolated components (must test individually due to known issues)
 | 
			
		||||
    isolated_in_tests = [c for c in ISOLATED_COMPONENTS if c in all_tests]
 | 
			
		||||
    if isolated_in_tests:
 | 
			
		||||
        print(
 | 
			
		||||
            f"\n⚠ {len(isolated_in_tests)} components must be tested in isolation (known build issues):"
 | 
			
		||||
        )
 | 
			
		||||
        for comp in sorted(isolated_in_tests):
 | 
			
		||||
            reason = ISOLATED_COMPONENTS[comp]
 | 
			
		||||
            print(f"  - {comp}: {reason}")
 | 
			
		||||
 | 
			
		||||
    # Show base bus components (test the bus platform implementations)
 | 
			
		||||
    base_bus_in_tests = [c for c in BASE_BUS_COMPONENTS if c in all_tests]
 | 
			
		||||
    if base_bus_in_tests:
 | 
			
		||||
        print(
 | 
			
		||||
            f"\n○ {len(base_bus_in_tests)} base bus platform components (tested individually):"
 | 
			
		||||
        )
 | 
			
		||||
        for comp in sorted(base_bus_in_tests):
 | 
			
		||||
            print(f"  - {comp}")
 | 
			
		||||
 | 
			
		||||
    # Show excluded components with detailed reasons
 | 
			
		||||
    if non_groupable_reasons:
 | 
			
		||||
        excluded_in_tests = [c for c in non_groupable_reasons if c in all_tests]
 | 
			
		||||
        if excluded_in_tests:
 | 
			
		||||
            print(
 | 
			
		||||
                f"\n⚠ {len(excluded_in_tests)} components excluded from grouping (each needs individual build):"
 | 
			
		||||
            )
 | 
			
		||||
            # Group by reason to show summary
 | 
			
		||||
            direct_bus = [
 | 
			
		||||
                c
 | 
			
		||||
                for c in excluded_in_tests
 | 
			
		||||
                if "NEEDS MIGRATION" in non_groupable_reasons.get(c, "")
 | 
			
		||||
            ]
 | 
			
		||||
            if direct_bus:
 | 
			
		||||
                print(
 | 
			
		||||
                    f"\n  ⚠⚠⚠ {len(direct_bus)} DEFINE BUSES DIRECTLY - NEED MIGRATION TO PACKAGES:"
 | 
			
		||||
                )
 | 
			
		||||
                for comp in sorted(direct_bus):
 | 
			
		||||
                    print(f"    - {comp}")
 | 
			
		||||
 | 
			
		||||
            other_reasons = [
 | 
			
		||||
                c
 | 
			
		||||
                for c in excluded_in_tests
 | 
			
		||||
                if "NEEDS MIGRATION" not in non_groupable_reasons.get(c, "")
 | 
			
		||||
            ]
 | 
			
		||||
            if other_reasons and len(other_reasons) <= 10:
 | 
			
		||||
                print("\n  Other non-groupable components:")
 | 
			
		||||
                for comp in sorted(other_reasons):
 | 
			
		||||
                    reason = non_groupable_reasons[comp]
 | 
			
		||||
                    print(f"    - {comp}: {reason}")
 | 
			
		||||
            elif other_reasons:
 | 
			
		||||
                print(
 | 
			
		||||
                    f"\n  Other non-groupable components: {len(other_reasons)} components"
 | 
			
		||||
                )
 | 
			
		||||
 | 
			
		||||
    # Distribute no_buses components into other groups to maximize efficiency
 | 
			
		||||
    # Components with no buses can merge with any bus group since they have no conflicting requirements
 | 
			
		||||
    no_buses_by_platform: dict[str, list[str]] = {}
 | 
			
		||||
    for (platform, signature), components in list(grouped_components.items()):
 | 
			
		||||
        if signature == NO_BUSES_SIGNATURE:
 | 
			
		||||
            no_buses_by_platform[platform] = components
 | 
			
		||||
            # Remove from grouped_components - we'll distribute them
 | 
			
		||||
            del grouped_components[(platform, signature)]
 | 
			
		||||
 | 
			
		||||
    # Distribute no_buses components into existing groups for each platform
 | 
			
		||||
    for platform, no_buses_comps in no_buses_by_platform.items():
 | 
			
		||||
        # Find all non-empty groups for this platform (excluding no_buses)
 | 
			
		||||
        platform_groups = [
 | 
			
		||||
            (sig, comps)
 | 
			
		||||
            for (plat, sig), comps in grouped_components.items()
 | 
			
		||||
            if plat == platform and sig != NO_BUSES_SIGNATURE
 | 
			
		||||
        ]
 | 
			
		||||
 | 
			
		||||
        if platform_groups:
 | 
			
		||||
            # Distribute no_buses components round-robin across existing groups
 | 
			
		||||
            for i, comp in enumerate(no_buses_comps):
 | 
			
		||||
                sig, _ = platform_groups[i % len(platform_groups)]
 | 
			
		||||
                grouped_components[(platform, sig)].append(comp)
 | 
			
		||||
        else:
 | 
			
		||||
            # No other groups for this platform - keep no_buses components together
 | 
			
		||||
            grouped_components[(platform, NO_BUSES_SIGNATURE)] = no_buses_comps
 | 
			
		||||
 | 
			
		||||
    # Split groups that exceed platform-specific maximum sizes
 | 
			
		||||
    # ESP8266 has limited IRAM and can't handle large component groups
 | 
			
		||||
    split_groups = {}
 | 
			
		||||
    for (platform, signature), components in list(grouped_components.items()):
 | 
			
		||||
        max_size = PLATFORM_MAX_GROUP_SIZE.get(platform)
 | 
			
		||||
        if max_size and len(components) > max_size:
 | 
			
		||||
            # Split this group into smaller groups
 | 
			
		||||
            print(
 | 
			
		||||
                f"\n  ℹ️ Splitting {platform} group (signature: {signature}) "
 | 
			
		||||
                f"from {len(components)} to max {max_size} components per group"
 | 
			
		||||
            )
 | 
			
		||||
            # Remove original group
 | 
			
		||||
            del grouped_components[(platform, signature)]
 | 
			
		||||
            # Create split groups
 | 
			
		||||
            for i in range(0, len(components), max_size):
 | 
			
		||||
                split_components = components[i : i + max_size]
 | 
			
		||||
                # Create unique signature for each split group
 | 
			
		||||
                split_signature = f"{signature}_split{i // max_size + 1}"
 | 
			
		||||
                split_groups[(platform, split_signature)] = split_components
 | 
			
		||||
    # Add split groups back
 | 
			
		||||
    grouped_components.update(split_groups)
 | 
			
		||||
 | 
			
		||||
    groups_to_test = []
 | 
			
		||||
    individual_tests = set()  # Use set to avoid duplicates
 | 
			
		||||
 | 
			
		||||
    for (platform, signature), components in sorted(grouped_components.items()):
 | 
			
		||||
        if len(components) > 1:
 | 
			
		||||
            groups_to_test.append((platform, signature, components))
 | 
			
		||||
        # Note: Don't add single-component groups to individual_tests here
 | 
			
		||||
        # They'll be added below when we check for ungrouped components
 | 
			
		||||
 | 
			
		||||
    # Add components that weren't grouped on any platform
 | 
			
		||||
    for component in all_tests:
 | 
			
		||||
        if component not in [c for _, _, comps in groups_to_test for c in comps]:
 | 
			
		||||
            individual_tests.add(component)
 | 
			
		||||
 | 
			
		||||
    if groups_to_test:
 | 
			
		||||
        print(f"\n✓ {len(groups_to_test)} groups will be tested together:")
 | 
			
		||||
        for platform, signature, components in groups_to_test:
 | 
			
		||||
            component_list = ", ".join(sorted(components))
 | 
			
		||||
            print(f"  [{platform}] [{signature}]: {component_list}")
 | 
			
		||||
            print(
 | 
			
		||||
                f"    → {len(components)} components in 1 build (saves {len(components) - 1} builds)"
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
    if individual_tests:
 | 
			
		||||
        print(f"\n○ {len(individual_tests)} components will be tested individually:")
 | 
			
		||||
        sorted_individual = sorted(individual_tests)
 | 
			
		||||
        for comp in sorted_individual[:10]:
 | 
			
		||||
            print(f"  - {comp}")
 | 
			
		||||
        if len(individual_tests) > 10:
 | 
			
		||||
            print(f"  ... and {len(individual_tests) - 10} more")
 | 
			
		||||
 | 
			
		||||
    # Calculate actual build counts based on test files, not component counts
 | 
			
		||||
    # Without grouping: every test file would be built separately
 | 
			
		||||
    total_test_files = sum(len(test_files) for test_files in all_tests.values())
 | 
			
		||||
 | 
			
		||||
    # With grouping:
 | 
			
		||||
    # - 1 build per group (regardless of how many components)
 | 
			
		||||
    # - Individual components still need all their platform builds
 | 
			
		||||
    individual_test_file_count = sum(
 | 
			
		||||
        len(all_tests[comp]) for comp in individual_tests if comp in all_tests
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    total_grouped_components = sum(len(comps) for _, _, comps in groups_to_test)
 | 
			
		||||
    total_builds_with_grouping = len(groups_to_test) + individual_test_file_count
 | 
			
		||||
    builds_saved = total_test_files - total_builds_with_grouping
 | 
			
		||||
 | 
			
		||||
    print(f"\n{'=' * 80}")
 | 
			
		||||
    print(
 | 
			
		||||
        f"Summary: {total_builds_with_grouping} builds total (vs {total_test_files} without grouping)"
 | 
			
		||||
    )
 | 
			
		||||
    print(
 | 
			
		||||
        f"  • {len(groups_to_test)} grouped builds ({total_grouped_components} components)"
 | 
			
		||||
    )
 | 
			
		||||
    print(
 | 
			
		||||
        f"  • {individual_test_file_count} individual builds ({len(individual_tests)} components)"
 | 
			
		||||
    )
 | 
			
		||||
    if total_test_files > 0:
 | 
			
		||||
        reduction_pct = (builds_saved / total_test_files) * 100
 | 
			
		||||
        print(f"  • Saves {builds_saved} builds ({reduction_pct:.1f}% reduction)")
 | 
			
		||||
    print("=" * 80 + "\n")
 | 
			
		||||
 | 
			
		||||
    # Execute grouped tests
 | 
			
		||||
    for (platform, signature), components in grouped_components.items():
 | 
			
		||||
        # Only group if we have multiple components with same signature
 | 
			
		||||
        if len(components) <= 1:
 | 
			
		||||
            continue
 | 
			
		||||
 | 
			
		||||
        # Filter out components not in our test list
 | 
			
		||||
        components_to_group = [c for c in components if c in all_tests]
 | 
			
		||||
        if len(components_to_group) <= 1:
 | 
			
		||||
            continue
 | 
			
		||||
 | 
			
		||||
        # Get platform base files
 | 
			
		||||
        if platform not in platform_bases:
 | 
			
		||||
            continue
 | 
			
		||||
 | 
			
		||||
        for base_file in platform_bases[platform]:
 | 
			
		||||
            platform_with_version = extract_platform_with_version(base_file)
 | 
			
		||||
 | 
			
		||||
            # Skip if platform filter doesn't match
 | 
			
		||||
            if platform_filter and platform != platform_filter:
 | 
			
		||||
                continue
 | 
			
		||||
            if (
 | 
			
		||||
                platform_filter
 | 
			
		||||
                and platform_with_version != platform_filter
 | 
			
		||||
                and not platform_with_version.startswith(f"{platform_filter}-")
 | 
			
		||||
            ):
 | 
			
		||||
                continue
 | 
			
		||||
 | 
			
		||||
            # Run grouped test
 | 
			
		||||
            success, cmd_str = run_grouped_test(
 | 
			
		||||
                components=components_to_group,
 | 
			
		||||
                platform=platform,
 | 
			
		||||
                platform_with_version=platform_with_version,
 | 
			
		||||
                base_file=base_file,
 | 
			
		||||
                build_dir=build_dir,
 | 
			
		||||
                tests_dir=tests_dir,
 | 
			
		||||
                esphome_command=esphome_command,
 | 
			
		||||
                continue_on_fail=continue_on_fail,
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
            # Mark all components as tested
 | 
			
		||||
            for comp in components_to_group:
 | 
			
		||||
                tested_components.add((comp, platform_with_version))
 | 
			
		||||
 | 
			
		||||
            # Record result for each component - show all components in grouped tests
 | 
			
		||||
            test_id = (
 | 
			
		||||
                f"GROUPED[{','.join(components_to_group)}].{platform_with_version}"
 | 
			
		||||
            )
 | 
			
		||||
            if success:
 | 
			
		||||
                passed_tests.append(test_id)
 | 
			
		||||
            else:
 | 
			
		||||
                failed_tests.append(test_id)
 | 
			
		||||
                failed_commands[test_id] = cmd_str
 | 
			
		||||
 | 
			
		||||
    return tested_components, passed_tests, failed_tests, failed_commands
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def run_individual_component_test(
 | 
			
		||||
    component: str,
 | 
			
		||||
    test_file: Path,
 | 
			
		||||
    platform: str,
 | 
			
		||||
    platform_with_version: str,
 | 
			
		||||
    base_file: Path,
 | 
			
		||||
    build_dir: Path,
 | 
			
		||||
    esphome_command: str,
 | 
			
		||||
    continue_on_fail: bool,
 | 
			
		||||
    tested_components: set[tuple[str, str]],
 | 
			
		||||
    passed_tests: list[str],
 | 
			
		||||
    failed_tests: list[str],
 | 
			
		||||
    failed_commands: dict[str, str],
 | 
			
		||||
) -> None:
 | 
			
		||||
    """Run an individual component test if not already tested in a group.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        component: Component name
 | 
			
		||||
        test_file: Test file path
 | 
			
		||||
        platform: Platform name
 | 
			
		||||
        platform_with_version: Platform with version
 | 
			
		||||
        base_file: Base file for platform
 | 
			
		||||
        build_dir: Build directory
 | 
			
		||||
        esphome_command: ESPHome command
 | 
			
		||||
        continue_on_fail: Whether to continue on failure
 | 
			
		||||
        tested_components: Set of already tested components
 | 
			
		||||
        passed_tests: List to append passed test IDs
 | 
			
		||||
        failed_tests: List to append failed test IDs
 | 
			
		||||
        failed_commands: Dict to store failed test commands
 | 
			
		||||
    """
 | 
			
		||||
    # Skip if already tested in a group
 | 
			
		||||
    if (component, platform_with_version) in tested_components:
 | 
			
		||||
        return
 | 
			
		||||
 | 
			
		||||
    test_name = test_file.stem.split(".")[0]
 | 
			
		||||
    success, cmd_str = run_esphome_test(
 | 
			
		||||
        component=component,
 | 
			
		||||
        test_file=test_file,
 | 
			
		||||
        platform=platform,
 | 
			
		||||
        platform_with_version=platform_with_version,
 | 
			
		||||
        base_file=base_file,
 | 
			
		||||
        build_dir=build_dir,
 | 
			
		||||
        esphome_command=esphome_command,
 | 
			
		||||
        continue_on_fail=continue_on_fail,
 | 
			
		||||
    )
 | 
			
		||||
    test_id = f"{component}.{test_name}.{platform_with_version}"
 | 
			
		||||
    if success:
 | 
			
		||||
        passed_tests.append(test_id)
 | 
			
		||||
    else:
 | 
			
		||||
        failed_tests.append(test_id)
 | 
			
		||||
        failed_commands[test_id] = cmd_str
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def test_components(
 | 
			
		||||
    component_patterns: list[str],
 | 
			
		||||
    platform_filter: str | None,
 | 
			
		||||
    esphome_command: str,
 | 
			
		||||
    continue_on_fail: bool,
 | 
			
		||||
    enable_grouping: bool = True,
 | 
			
		||||
) -> int:
 | 
			
		||||
    """Test components with optional intelligent grouping.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        component_patterns: List of component name patterns
 | 
			
		||||
        platform_filter: Optional platform to filter by
 | 
			
		||||
        esphome_command: ESPHome command (config/compile)
 | 
			
		||||
        continue_on_fail: Whether to continue on failure
 | 
			
		||||
        enable_grouping: Whether to enable component grouping
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        Exit code (0 for success, 1 for failure)
 | 
			
		||||
    """
 | 
			
		||||
    # Setup paths
 | 
			
		||||
    repo_root = Path(__file__).parent.parent
 | 
			
		||||
    tests_dir = repo_root / "tests" / "components"
 | 
			
		||||
    build_components_dir = repo_root / "tests" / "test_build_components"
 | 
			
		||||
    build_dir = build_components_dir / "build"
 | 
			
		||||
    build_dir.mkdir(parents=True, exist_ok=True)
 | 
			
		||||
 | 
			
		||||
    # Get platform base files
 | 
			
		||||
    platform_bases = get_platform_base_files(build_components_dir)
 | 
			
		||||
 | 
			
		||||
    # Find all component tests
 | 
			
		||||
    all_tests = {}
 | 
			
		||||
    for pattern in component_patterns:
 | 
			
		||||
        all_tests.update(find_component_tests(tests_dir, pattern))
 | 
			
		||||
 | 
			
		||||
    if not all_tests:
 | 
			
		||||
        print(f"No components found matching: {component_patterns}")
 | 
			
		||||
        return 1
 | 
			
		||||
 | 
			
		||||
    print(f"Found {len(all_tests)} components to test")
 | 
			
		||||
 | 
			
		||||
    # Run tests
 | 
			
		||||
    failed_tests = []
 | 
			
		||||
    passed_tests = []
 | 
			
		||||
    tested_components = set()  # Track which components were tested in groups
 | 
			
		||||
    failed_commands = {}  # Track commands for failed tests
 | 
			
		||||
 | 
			
		||||
    # First, run grouped tests if grouping is enabled
 | 
			
		||||
    if enable_grouping:
 | 
			
		||||
        (
 | 
			
		||||
            tested_components,
 | 
			
		||||
            passed_tests,
 | 
			
		||||
            failed_tests,
 | 
			
		||||
            failed_commands,
 | 
			
		||||
        ) = run_grouped_component_tests(
 | 
			
		||||
            all_tests=all_tests,
 | 
			
		||||
            platform_filter=platform_filter,
 | 
			
		||||
            platform_bases=platform_bases,
 | 
			
		||||
            tests_dir=tests_dir,
 | 
			
		||||
            build_dir=build_dir,
 | 
			
		||||
            esphome_command=esphome_command,
 | 
			
		||||
            continue_on_fail=continue_on_fail,
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    # Then run individual tests for components not in groups
 | 
			
		||||
    for component, test_files in sorted(all_tests.items()):
 | 
			
		||||
        for test_file in test_files:
 | 
			
		||||
            test_name, platform = parse_test_filename(test_file)
 | 
			
		||||
 | 
			
		||||
            # Handle "all" platform tests
 | 
			
		||||
            if platform == "all":
 | 
			
		||||
                # Run for all platforms
 | 
			
		||||
                for plat, base_files in platform_bases.items():
 | 
			
		||||
                    if platform_filter and plat != platform_filter:
 | 
			
		||||
                        continue
 | 
			
		||||
 | 
			
		||||
                    for base_file in base_files:
 | 
			
		||||
                        platform_with_version = extract_platform_with_version(base_file)
 | 
			
		||||
                        run_individual_component_test(
 | 
			
		||||
                            component=component,
 | 
			
		||||
                            test_file=test_file,
 | 
			
		||||
                            platform=plat,
 | 
			
		||||
                            platform_with_version=platform_with_version,
 | 
			
		||||
                            base_file=base_file,
 | 
			
		||||
                            build_dir=build_dir,
 | 
			
		||||
                            esphome_command=esphome_command,
 | 
			
		||||
                            continue_on_fail=continue_on_fail,
 | 
			
		||||
                            tested_components=tested_components,
 | 
			
		||||
                            passed_tests=passed_tests,
 | 
			
		||||
                            failed_tests=failed_tests,
 | 
			
		||||
                            failed_commands=failed_commands,
 | 
			
		||||
                        )
 | 
			
		||||
            else:
 | 
			
		||||
                # Platform-specific test
 | 
			
		||||
                if platform_filter and platform != platform_filter:
 | 
			
		||||
                    continue
 | 
			
		||||
 | 
			
		||||
                if platform not in platform_bases:
 | 
			
		||||
                    print(f"No base file for platform: {platform}")
 | 
			
		||||
                    continue
 | 
			
		||||
 | 
			
		||||
                for base_file in platform_bases[platform]:
 | 
			
		||||
                    platform_with_version = extract_platform_with_version(base_file)
 | 
			
		||||
 | 
			
		||||
                    # Skip if requested platform doesn't match
 | 
			
		||||
                    if (
 | 
			
		||||
                        platform_filter
 | 
			
		||||
                        and platform_with_version != platform_filter
 | 
			
		||||
                        and not platform_with_version.startswith(f"{platform_filter}-")
 | 
			
		||||
                    ):
 | 
			
		||||
                        continue
 | 
			
		||||
 | 
			
		||||
                    run_individual_component_test(
 | 
			
		||||
                        component=component,
 | 
			
		||||
                        test_file=test_file,
 | 
			
		||||
                        platform=platform,
 | 
			
		||||
                        platform_with_version=platform_with_version,
 | 
			
		||||
                        base_file=base_file,
 | 
			
		||||
                        build_dir=build_dir,
 | 
			
		||||
                        esphome_command=esphome_command,
 | 
			
		||||
                        continue_on_fail=continue_on_fail,
 | 
			
		||||
                        tested_components=tested_components,
 | 
			
		||||
                        passed_tests=passed_tests,
 | 
			
		||||
                        failed_tests=failed_tests,
 | 
			
		||||
                        failed_commands=failed_commands,
 | 
			
		||||
                    )
 | 
			
		||||
 | 
			
		||||
    # Print summary
 | 
			
		||||
    print("\n" + "=" * 80)
 | 
			
		||||
    print(f"Test Summary: {len(passed_tests)} passed, {len(failed_tests)} failed")
 | 
			
		||||
    print("=" * 80)
 | 
			
		||||
 | 
			
		||||
    if failed_tests:
 | 
			
		||||
        print("\nFailed tests:")
 | 
			
		||||
        for test in failed_tests:
 | 
			
		||||
            print(f"  - {test}")
 | 
			
		||||
 | 
			
		||||
        # Print failed commands at the end for easy copy-paste from CI logs
 | 
			
		||||
        print("\n" + "=" * 80)
 | 
			
		||||
        print("Failed test commands (copy-paste to reproduce locally):")
 | 
			
		||||
        print("=" * 80)
 | 
			
		||||
        for test in failed_tests:
 | 
			
		||||
            if test in failed_commands:
 | 
			
		||||
                print(f"\n# {test}")
 | 
			
		||||
                print(failed_commands[test])
 | 
			
		||||
        print()
 | 
			
		||||
 | 
			
		||||
        return 1
 | 
			
		||||
 | 
			
		||||
    return 0
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def main() -> int:
 | 
			
		||||
    """Main entry point."""
 | 
			
		||||
    parser = argparse.ArgumentParser(
 | 
			
		||||
        description="Test ESPHome component builds with intelligent grouping"
 | 
			
		||||
    )
 | 
			
		||||
    parser.add_argument(
 | 
			
		||||
        "-e",
 | 
			
		||||
        "--esphome-command",
 | 
			
		||||
        default="compile",
 | 
			
		||||
        choices=["config", "compile", "clean"],
 | 
			
		||||
        help="ESPHome command to run (default: compile)",
 | 
			
		||||
    )
 | 
			
		||||
    parser.add_argument(
 | 
			
		||||
        "-c",
 | 
			
		||||
        "--components",
 | 
			
		||||
        default="*",
 | 
			
		||||
        help="Component pattern(s) to test (default: *). Comma-separated.",
 | 
			
		||||
    )
 | 
			
		||||
    parser.add_argument(
 | 
			
		||||
        "-t",
 | 
			
		||||
        "--target",
 | 
			
		||||
        help="Target platform to test (e.g., esp32-idf)",
 | 
			
		||||
    )
 | 
			
		||||
    parser.add_argument(
 | 
			
		||||
        "-f",
 | 
			
		||||
        "--continue-on-fail",
 | 
			
		||||
        action="store_true",
 | 
			
		||||
        help="Continue testing even if a test fails",
 | 
			
		||||
    )
 | 
			
		||||
    parser.add_argument(
 | 
			
		||||
        "--no-grouping",
 | 
			
		||||
        action="store_true",
 | 
			
		||||
        help="Disable component grouping (test each component individually)",
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    args = parser.parse_args()
 | 
			
		||||
 | 
			
		||||
    # Parse component patterns
 | 
			
		||||
    component_patterns = [p.strip() for p in args.components.split(",")]
 | 
			
		||||
 | 
			
		||||
    return test_components(
 | 
			
		||||
        component_patterns=component_patterns,
 | 
			
		||||
        platform_filter=args.target,
 | 
			
		||||
        esphome_command=args.esphome_command,
 | 
			
		||||
        continue_on_fail=args.continue_on_fail,
 | 
			
		||||
        enable_grouping=not args.no_grouping,
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == "__main__":
 | 
			
		||||
    sys.exit(main())
 | 
			
		||||
							
								
								
									
										227
									
								
								script/test_component_grouping.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										227
									
								
								script/test_component_grouping.py
									
									
									
									
									
										Executable file
									
								
							@@ -0,0 +1,227 @@
 | 
			
		||||
#!/usr/bin/env python3
 | 
			
		||||
"""Test component grouping by finding and testing groups of components.
 | 
			
		||||
 | 
			
		||||
This script analyzes components, finds groups that can be tested together,
 | 
			
		||||
and runs test builds for those groups.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
from __future__ import annotations
 | 
			
		||||
 | 
			
		||||
import argparse
 | 
			
		||||
from pathlib import Path
 | 
			
		||||
import subprocess
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
# Add esphome to path
 | 
			
		||||
sys.path.insert(0, str(Path(__file__).parent.parent))
 | 
			
		||||
 | 
			
		||||
from script.analyze_component_buses import (
 | 
			
		||||
    analyze_all_components,
 | 
			
		||||
    group_components_by_signature,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def test_component_group(
 | 
			
		||||
    components: list[str],
 | 
			
		||||
    platform: str,
 | 
			
		||||
    esphome_command: str = "compile",
 | 
			
		||||
    dry_run: bool = False,
 | 
			
		||||
) -> bool:
 | 
			
		||||
    """Test a group of components together.
 | 
			
		||||
 | 
			
		||||
    Args:
 | 
			
		||||
        components: List of component names to test together
 | 
			
		||||
        platform: Platform to test on (e.g., "esp32-idf")
 | 
			
		||||
        esphome_command: ESPHome command to run (config/compile/clean)
 | 
			
		||||
        dry_run: If True, only print the command without running it
 | 
			
		||||
 | 
			
		||||
    Returns:
 | 
			
		||||
        True if test passed, False otherwise
 | 
			
		||||
    """
 | 
			
		||||
    components_str = ",".join(components)
 | 
			
		||||
    cmd = [
 | 
			
		||||
        "./script/test_build_components",
 | 
			
		||||
        "-c",
 | 
			
		||||
        components_str,
 | 
			
		||||
        "-t",
 | 
			
		||||
        platform,
 | 
			
		||||
        "-e",
 | 
			
		||||
        esphome_command,
 | 
			
		||||
    ]
 | 
			
		||||
 | 
			
		||||
    print(f"\n{'=' * 80}")
 | 
			
		||||
    print(f"Testing {len(components)} components on {platform}:")
 | 
			
		||||
    for comp in components:
 | 
			
		||||
        print(f"  - {comp}")
 | 
			
		||||
    print(f"{'=' * 80}")
 | 
			
		||||
    print(f"Command: {' '.join(cmd)}\n")
 | 
			
		||||
 | 
			
		||||
    if dry_run:
 | 
			
		||||
        print("[DRY RUN] Skipping actual test")
 | 
			
		||||
        return True
 | 
			
		||||
 | 
			
		||||
    try:
 | 
			
		||||
        result = subprocess.run(cmd, check=False)
 | 
			
		||||
        return result.returncode == 0
 | 
			
		||||
    except Exception as e:
 | 
			
		||||
        print(f"Error running test: {e}")
 | 
			
		||||
        return False
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def main() -> None:
 | 
			
		||||
    """Main entry point."""
 | 
			
		||||
    parser = argparse.ArgumentParser(
 | 
			
		||||
        description="Test component grouping by finding and testing groups"
 | 
			
		||||
    )
 | 
			
		||||
    parser.add_argument(
 | 
			
		||||
        "--platform",
 | 
			
		||||
        "-p",
 | 
			
		||||
        default="esp32-idf",
 | 
			
		||||
        help="Platform to test (default: esp32-idf)",
 | 
			
		||||
    )
 | 
			
		||||
    parser.add_argument(
 | 
			
		||||
        "-e",
 | 
			
		||||
        "--esphome-command",
 | 
			
		||||
        default="compile",
 | 
			
		||||
        choices=["config", "compile", "clean"],
 | 
			
		||||
        help="ESPHome command to run (default: compile)",
 | 
			
		||||
    )
 | 
			
		||||
    parser.add_argument(
 | 
			
		||||
        "--all",
 | 
			
		||||
        action="store_true",
 | 
			
		||||
        help="Test all components (sets --min-size=1, --max-size=10000, --max-groups=10000)",
 | 
			
		||||
    )
 | 
			
		||||
    parser.add_argument(
 | 
			
		||||
        "--min-size",
 | 
			
		||||
        type=int,
 | 
			
		||||
        default=3,
 | 
			
		||||
        help="Minimum group size to test (default: 3)",
 | 
			
		||||
    )
 | 
			
		||||
    parser.add_argument(
 | 
			
		||||
        "--max-size",
 | 
			
		||||
        type=int,
 | 
			
		||||
        default=10,
 | 
			
		||||
        help="Maximum group size to test (default: 10)",
 | 
			
		||||
    )
 | 
			
		||||
    parser.add_argument(
 | 
			
		||||
        "--max-groups",
 | 
			
		||||
        type=int,
 | 
			
		||||
        default=5,
 | 
			
		||||
        help="Maximum number of groups to test (default: 5)",
 | 
			
		||||
    )
 | 
			
		||||
    parser.add_argument(
 | 
			
		||||
        "--signature",
 | 
			
		||||
        "-s",
 | 
			
		||||
        help="Only test groups with this bus signature (e.g., 'spi', 'i2c', 'uart')",
 | 
			
		||||
    )
 | 
			
		||||
    parser.add_argument(
 | 
			
		||||
        "--dry-run",
 | 
			
		||||
        action="store_true",
 | 
			
		||||
        help="Print commands without running them",
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    args = parser.parse_args()
 | 
			
		||||
 | 
			
		||||
    # If --all is specified, test all components without grouping
 | 
			
		||||
    if args.all:
 | 
			
		||||
        # Get all components from tests/components directory
 | 
			
		||||
        components_dir = Path("tests/components")
 | 
			
		||||
        all_components = sorted(
 | 
			
		||||
            [d.name for d in components_dir.iterdir() if d.is_dir()]
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
        if not all_components:
 | 
			
		||||
            print(f"\nNo components found in {components_dir}")
 | 
			
		||||
            return
 | 
			
		||||
 | 
			
		||||
        print(f"\nTesting all {len(all_components)} components together")
 | 
			
		||||
 | 
			
		||||
        success = test_component_group(
 | 
			
		||||
            all_components, args.platform, args.esphome_command, args.dry_run
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
        # Print summary
 | 
			
		||||
        print(f"\n{'=' * 80}")
 | 
			
		||||
        print("TEST SUMMARY")
 | 
			
		||||
        print(f"{'=' * 80}")
 | 
			
		||||
        status = "✅ PASS" if success else "❌ FAIL"
 | 
			
		||||
        print(f"{status} All components: {len(all_components)} components")
 | 
			
		||||
 | 
			
		||||
        if not args.dry_run and not success:
 | 
			
		||||
            sys.exit(1)
 | 
			
		||||
        return
 | 
			
		||||
 | 
			
		||||
    print("Analyzing all components...")
 | 
			
		||||
    components, non_groupable, _ = analyze_all_components(Path("tests/components"))
 | 
			
		||||
 | 
			
		||||
    print(f"Found {len(components)} components, {len(non_groupable)} non-groupable")
 | 
			
		||||
 | 
			
		||||
    # Group components by signature for the platform
 | 
			
		||||
    groups = group_components_by_signature(components, args.platform)
 | 
			
		||||
 | 
			
		||||
    # Filter and sort groups
 | 
			
		||||
    filtered_groups = []
 | 
			
		||||
    for signature, comp_list in groups.items():
 | 
			
		||||
        # Filter by signature if specified
 | 
			
		||||
        if args.signature and signature != args.signature:
 | 
			
		||||
            continue
 | 
			
		||||
 | 
			
		||||
        # Remove non-groupable components
 | 
			
		||||
        comp_list = [c for c in comp_list if c not in non_groupable]
 | 
			
		||||
 | 
			
		||||
        # Filter by minimum size
 | 
			
		||||
        if len(comp_list) < args.min_size:
 | 
			
		||||
            continue
 | 
			
		||||
 | 
			
		||||
        # If group is larger than max_size, we'll take a subset later
 | 
			
		||||
        filtered_groups.append((signature, comp_list))
 | 
			
		||||
 | 
			
		||||
    # Sort by group size (largest first)
 | 
			
		||||
    filtered_groups.sort(key=lambda x: len(x[1]), reverse=True)
 | 
			
		||||
 | 
			
		||||
    # Limit number of groups
 | 
			
		||||
    filtered_groups = filtered_groups[: args.max_groups]
 | 
			
		||||
 | 
			
		||||
    if not filtered_groups:
 | 
			
		||||
        print("\nNo groups found matching criteria:")
 | 
			
		||||
        print(f"  - Platform: {args.platform}")
 | 
			
		||||
        print(f"  - Size: {args.min_size}-{args.max_size}")
 | 
			
		||||
        if args.signature:
 | 
			
		||||
            print(f"  - Signature: {args.signature}")
 | 
			
		||||
        return
 | 
			
		||||
 | 
			
		||||
    print(f"\nFound {len(filtered_groups)} groups to test:")
 | 
			
		||||
    for signature, comp_list in filtered_groups:
 | 
			
		||||
        print(f"  [{signature}]: {len(comp_list)} components")
 | 
			
		||||
 | 
			
		||||
    # Test each group
 | 
			
		||||
    results = []
 | 
			
		||||
    for signature, comp_list in filtered_groups:
 | 
			
		||||
        # Limit to max_size if group is larger
 | 
			
		||||
        if len(comp_list) > args.max_size:
 | 
			
		||||
            comp_list = comp_list[: args.max_size]
 | 
			
		||||
 | 
			
		||||
        success = test_component_group(
 | 
			
		||||
            comp_list, args.platform, args.esphome_command, args.dry_run
 | 
			
		||||
        )
 | 
			
		||||
        results.append((signature, comp_list, success))
 | 
			
		||||
 | 
			
		||||
        if not args.dry_run and not success:
 | 
			
		||||
            print(f"\n❌ FAILED: {signature} group")
 | 
			
		||||
            break
 | 
			
		||||
 | 
			
		||||
    # Print summary
 | 
			
		||||
    print(f"\n{'=' * 80}")
 | 
			
		||||
    print("TEST SUMMARY")
 | 
			
		||||
    print(f"{'=' * 80}")
 | 
			
		||||
    for signature, comp_list, success in results:
 | 
			
		||||
        status = "✅ PASS" if success else "❌ FAIL"
 | 
			
		||||
        print(f"{status} [{signature}]: {len(comp_list)} components")
 | 
			
		||||
 | 
			
		||||
    # Exit with error if any tests failed
 | 
			
		||||
    if not args.dry_run and any(not success for _, _, success in results):
 | 
			
		||||
        sys.exit(1)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == "__main__":
 | 
			
		||||
    main()
 | 
			
		||||
@@ -1,11 +1,4 @@
 | 
			
		||||
uart:
 | 
			
		||||
  - id: uart_a01nyub
 | 
			
		||||
    tx_pin: ${tx_pin}
 | 
			
		||||
    rx_pin: ${rx_pin}
 | 
			
		||||
    baud_rate: 9600
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - platform: a01nyub
 | 
			
		||||
    id: a01nyub_sensor
 | 
			
		||||
    name: a01nyub Distance
 | 
			
		||||
    uart_id: uart_a01nyub
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  tx_pin: GPIO17
 | 
			
		||||
  rx_pin: GPIO16
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
@@ -1,5 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  tx_pin: GPIO4
 | 
			
		||||
  rx_pin: GPIO5
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
@@ -1,3 +1,6 @@
 | 
			
		||||
packages:
 | 
			
		||||
  uart: !include ../../test_build_components/common/uart/esp32-c3-idf.yaml
 | 
			
		||||
 | 
			
		||||
substitutions:
 | 
			
		||||
  tx_pin: GPIO4
 | 
			
		||||
  rx_pin: GPIO5
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +1,8 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  tx_pin: GPIO17
 | 
			
		||||
  rx_pin: GPIO16
 | 
			
		||||
  tx_pin: GPIO4
 | 
			
		||||
  rx_pin: GPIO5
 | 
			
		||||
 | 
			
		||||
packages:
 | 
			
		||||
  uart: !include ../../test_build_components/common/uart/esp32-idf.yaml
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +1,4 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  tx_pin: GPIO4
 | 
			
		||||
  rx_pin: GPIO5
 | 
			
		||||
packages:
 | 
			
		||||
  uart: !include ../../test_build_components/common/uart/esp8266-ard.yaml
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +1,4 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  tx_pin: GPIO4
 | 
			
		||||
  rx_pin: GPIO5
 | 
			
		||||
packages:
 | 
			
		||||
  uart: !include ../../test_build_components/common/uart/rp2040-ard.yaml
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -1,11 +1,4 @@
 | 
			
		||||
uart:
 | 
			
		||||
  - id: uart_a02yyuw
 | 
			
		||||
    tx_pin: ${tx_pin}
 | 
			
		||||
    rx_pin: ${rx_pin}
 | 
			
		||||
    baud_rate: 9600
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - platform: a02yyuw
 | 
			
		||||
    id: a02yyuw_sensor
 | 
			
		||||
    name: a02yyuw Distance
 | 
			
		||||
    uart_id: uart_a02yyuw
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  tx_pin: GPIO17
 | 
			
		||||
  rx_pin: GPIO16
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
@@ -1,5 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  tx_pin: GPIO4
 | 
			
		||||
  rx_pin: GPIO5
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
@@ -1,3 +1,6 @@
 | 
			
		||||
packages:
 | 
			
		||||
  uart: !include ../../test_build_components/common/uart/esp32-c3-idf.yaml
 | 
			
		||||
 | 
			
		||||
substitutions:
 | 
			
		||||
  tx_pin: GPIO4
 | 
			
		||||
  rx_pin: GPIO5
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +1,8 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  tx_pin: GPIO17
 | 
			
		||||
  rx_pin: GPIO16
 | 
			
		||||
  tx_pin: GPIO4
 | 
			
		||||
  rx_pin: GPIO5
 | 
			
		||||
 | 
			
		||||
packages:
 | 
			
		||||
  uart: !include ../../test_build_components/common/uart/esp32-idf.yaml
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +1,4 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  tx_pin: GPIO4
 | 
			
		||||
  rx_pin: GPIO5
 | 
			
		||||
packages:
 | 
			
		||||
  uart: !include ../../test_build_components/common/uart/esp8266-ard.yaml
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +1,4 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  tx_pin: GPIO4
 | 
			
		||||
  rx_pin: GPIO5
 | 
			
		||||
packages:
 | 
			
		||||
  uart: !include ../../test_build_components/common/uart/rp2040-ard.yaml
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -1,6 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  step_pin: GPIO22
 | 
			
		||||
  dir_pin: GPIO23
 | 
			
		||||
  sleep_pin: GPIO25
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
@@ -1,6 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  step_pin: GPIO2
 | 
			
		||||
  dir_pin: GPIO3
 | 
			
		||||
  sleep_pin: GPIO5
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
@@ -1,6 +1,6 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  step_pin: GPIO22
 | 
			
		||||
  dir_pin: GPIO23
 | 
			
		||||
  dir_pin: GPIO4
 | 
			
		||||
  sleep_pin: GPIO25
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -1,6 +1,6 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  step_pin: GPIO1
 | 
			
		||||
  dir_pin: GPIO2
 | 
			
		||||
  sleep_pin: GPIO5
 | 
			
		||||
  sleep_pin: GPIO0
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +1,5 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  gate_pin: GPIO18
 | 
			
		||||
  zero_cross_pin: GPIO19
 | 
			
		||||
  gate_pin: GPIO4
 | 
			
		||||
  zero_cross_pin: GPIO5
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -1,5 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  gate_pin: GPIO5
 | 
			
		||||
  zero_cross_pin: GPIO4
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
@@ -1,5 +1,5 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  gate_pin: GPIO5
 | 
			
		||||
  zero_cross_pin: GPIO4
 | 
			
		||||
  gate_pin: GPIO0
 | 
			
		||||
  zero_cross_pin: GPIO2
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -1,11 +0,0 @@
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: my_sensor
 | 
			
		||||
    platform: adc
 | 
			
		||||
    name: ADC Test sensor
 | 
			
		||||
    update_interval: "1:01"
 | 
			
		||||
    attenuation: 2.5db
 | 
			
		||||
    unit_of_measurement: "°C"
 | 
			
		||||
    icon: "mdi:water-percent"
 | 
			
		||||
    accuracy_decimals: 5
 | 
			
		||||
    setup_priority: -100
 | 
			
		||||
    force_update: true
 | 
			
		||||
@@ -1,7 +1,11 @@
 | 
			
		||||
packages:
 | 
			
		||||
  base: !include common.yaml
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: !extend my_sensor
 | 
			
		||||
  - id: my_sensor
 | 
			
		||||
    platform: adc
 | 
			
		||||
    pin: P23
 | 
			
		||||
    attenuation: !remove
 | 
			
		||||
    name: ADC Test sensor
 | 
			
		||||
    update_interval: "1:01"
 | 
			
		||||
    unit_of_measurement: "°C"
 | 
			
		||||
    icon: "mdi:water-percent"
 | 
			
		||||
    accuracy_decimals: 5
 | 
			
		||||
    setup_priority: -100
 | 
			
		||||
    force_update: true
 | 
			
		||||
 
 | 
			
		||||
@@ -1,6 +0,0 @@
 | 
			
		||||
packages:
 | 
			
		||||
  base: !include common.yaml
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: !extend my_sensor
 | 
			
		||||
    pin: A0
 | 
			
		||||
@@ -1,6 +0,0 @@
 | 
			
		||||
packages:
 | 
			
		||||
  base: !include common.yaml
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: !extend my_sensor
 | 
			
		||||
    pin: 4
 | 
			
		||||
@@ -1,6 +1,12 @@
 | 
			
		||||
packages:
 | 
			
		||||
  base: !include common.yaml
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: !extend my_sensor
 | 
			
		||||
    pin: 4
 | 
			
		||||
  - id: my_sensor
 | 
			
		||||
    platform: adc
 | 
			
		||||
    pin: GPIO1
 | 
			
		||||
    name: ADC Test sensor
 | 
			
		||||
    update_interval: "1:01"
 | 
			
		||||
    attenuation: 2.5db
 | 
			
		||||
    unit_of_measurement: "°C"
 | 
			
		||||
    icon: "mdi:water-percent"
 | 
			
		||||
    accuracy_decimals: 5
 | 
			
		||||
    setup_priority: -100
 | 
			
		||||
    force_update: true
 | 
			
		||||
 
 | 
			
		||||
@@ -1,6 +1,12 @@
 | 
			
		||||
packages:
 | 
			
		||||
  base: !include common.yaml
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: !extend my_sensor
 | 
			
		||||
  - id: my_sensor
 | 
			
		||||
    platform: adc
 | 
			
		||||
    pin: A0
 | 
			
		||||
    name: ADC Test sensor
 | 
			
		||||
    update_interval: "1:01"
 | 
			
		||||
    attenuation: 2.5db
 | 
			
		||||
    unit_of_measurement: "°C"
 | 
			
		||||
    icon: "mdi:water-percent"
 | 
			
		||||
    accuracy_decimals: 5
 | 
			
		||||
    setup_priority: -100
 | 
			
		||||
    force_update: true
 | 
			
		||||
 
 | 
			
		||||
@@ -1,6 +1,12 @@
 | 
			
		||||
packages:
 | 
			
		||||
  base: !include common.yaml
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: !extend my_sensor
 | 
			
		||||
    pin: GPIO50
 | 
			
		||||
  - id: my_sensor
 | 
			
		||||
    platform: adc
 | 
			
		||||
    pin: GPIO16
 | 
			
		||||
    name: ADC Test sensor
 | 
			
		||||
    update_interval: "1:01"
 | 
			
		||||
    attenuation: 2.5db
 | 
			
		||||
    unit_of_measurement: "°C"
 | 
			
		||||
    icon: "mdi:water-percent"
 | 
			
		||||
    accuracy_decimals: 5
 | 
			
		||||
    setup_priority: -100
 | 
			
		||||
    force_update: true
 | 
			
		||||
 
 | 
			
		||||
@@ -1,6 +0,0 @@
 | 
			
		||||
packages:
 | 
			
		||||
  base: !include common.yaml
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: !extend my_sensor
 | 
			
		||||
    pin: 1
 | 
			
		||||
@@ -1,6 +1,12 @@
 | 
			
		||||
packages:
 | 
			
		||||
  base: !include common.yaml
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: !extend my_sensor
 | 
			
		||||
    pin: 1
 | 
			
		||||
  - id: my_sensor
 | 
			
		||||
    platform: adc
 | 
			
		||||
    pin: GPIO1
 | 
			
		||||
    name: ADC Test sensor
 | 
			
		||||
    update_interval: "1:01"
 | 
			
		||||
    attenuation: 2.5db
 | 
			
		||||
    unit_of_measurement: "°C"
 | 
			
		||||
    icon: "mdi:water-percent"
 | 
			
		||||
    accuracy_decimals: 5
 | 
			
		||||
    setup_priority: -100
 | 
			
		||||
    force_update: true
 | 
			
		||||
 
 | 
			
		||||
@@ -1,6 +0,0 @@
 | 
			
		||||
packages:
 | 
			
		||||
  base: !include common.yaml
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: !extend my_sensor
 | 
			
		||||
    pin: 1
 | 
			
		||||
@@ -1,6 +1,12 @@
 | 
			
		||||
packages:
 | 
			
		||||
  base: !include common.yaml
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: !extend my_sensor
 | 
			
		||||
    pin: 1
 | 
			
		||||
  - id: my_sensor
 | 
			
		||||
    platform: adc
 | 
			
		||||
    pin: GPIO1
 | 
			
		||||
    name: ADC Test sensor
 | 
			
		||||
    update_interval: "1:01"
 | 
			
		||||
    attenuation: 2.5db
 | 
			
		||||
    unit_of_measurement: "°C"
 | 
			
		||||
    icon: "mdi:water-percent"
 | 
			
		||||
    accuracy_decimals: 5
 | 
			
		||||
    setup_priority: -100
 | 
			
		||||
    force_update: true
 | 
			
		||||
 
 | 
			
		||||
@@ -1,7 +1,11 @@
 | 
			
		||||
packages:
 | 
			
		||||
  base: !include common.yaml
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: !extend my_sensor
 | 
			
		||||
  - id: my_sensor
 | 
			
		||||
    platform: adc
 | 
			
		||||
    pin: VCC
 | 
			
		||||
    attenuation: !remove
 | 
			
		||||
    name: ADC Test sensor
 | 
			
		||||
    update_interval: "1:01"
 | 
			
		||||
    unit_of_measurement: "°C"
 | 
			
		||||
    icon: "mdi:water-percent"
 | 
			
		||||
    accuracy_decimals: 5
 | 
			
		||||
    setup_priority: -100
 | 
			
		||||
    force_update: true
 | 
			
		||||
 
 | 
			
		||||
@@ -1,7 +1,11 @@
 | 
			
		||||
packages:
 | 
			
		||||
  base: !include common.yaml
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: !extend my_sensor
 | 
			
		||||
    pin: PA0
 | 
			
		||||
    attenuation: !remove
 | 
			
		||||
  - id: my_sensor
 | 
			
		||||
    platform: adc
 | 
			
		||||
    pin: A5
 | 
			
		||||
    name: ADC Test sensor
 | 
			
		||||
    update_interval: "1:01"
 | 
			
		||||
    unit_of_measurement: "°C"
 | 
			
		||||
    icon: "mdi:water-percent"
 | 
			
		||||
    accuracy_decimals: 5
 | 
			
		||||
    setup_priority: -100
 | 
			
		||||
    force_update: true
 | 
			
		||||
 
 | 
			
		||||
@@ -1,7 +1,11 @@
 | 
			
		||||
packages:
 | 
			
		||||
  base: !include common.yaml
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - id: !extend my_sensor
 | 
			
		||||
  - id: my_sensor
 | 
			
		||||
    platform: adc
 | 
			
		||||
    pin: VCC
 | 
			
		||||
    attenuation: !remove
 | 
			
		||||
    name: ADC Test sensor
 | 
			
		||||
    update_interval: "1:01"
 | 
			
		||||
    unit_of_measurement: "°C"
 | 
			
		||||
    icon: "mdi:water-percent"
 | 
			
		||||
    accuracy_decimals: 5
 | 
			
		||||
    setup_priority: -100
 | 
			
		||||
    force_update: true
 | 
			
		||||
 
 | 
			
		||||
@@ -1,9 +1,3 @@
 | 
			
		||||
spi:
 | 
			
		||||
  - id: spi_adc128s102
 | 
			
		||||
    clk_pin: ${clk_pin}
 | 
			
		||||
    mosi_pin: ${mosi_pin}
 | 
			
		||||
    miso_pin: ${miso_pin}
 | 
			
		||||
 | 
			
		||||
adc128s102:
 | 
			
		||||
  cs_pin: ${cs_pin}
 | 
			
		||||
  id: adc128s102_adc
 | 
			
		||||
 
 | 
			
		||||
@@ -1,7 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  clk_pin: GPIO16
 | 
			
		||||
  mosi_pin: GPIO17
 | 
			
		||||
  miso_pin: GPIO15
 | 
			
		||||
  cs_pin: GPIO12
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
@@ -1,7 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  clk_pin: GPIO6
 | 
			
		||||
  mosi_pin: GPIO7
 | 
			
		||||
  miso_pin: GPIO5
 | 
			
		||||
  cs_pin: GPIO2
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
@@ -1,7 +1,7 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  clk_pin: GPIO6
 | 
			
		||||
  mosi_pin: GPIO7
 | 
			
		||||
  miso_pin: GPIO5
 | 
			
		||||
  cs_pin: GPIO2
 | 
			
		||||
 | 
			
		||||
packages:
 | 
			
		||||
  spi: !include ../../test_build_components/common/spi/esp32-c3-idf.yaml
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -1,7 +1,7 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  clk_pin: GPIO16
 | 
			
		||||
  mosi_pin: GPIO17
 | 
			
		||||
  miso_pin: GPIO15
 | 
			
		||||
  cs_pin: GPIO12
 | 
			
		||||
 | 
			
		||||
packages:
 | 
			
		||||
  spi: !include ../../test_build_components/common/spi/esp32-idf.yaml
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -1,7 +1,10 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  clk_pin: GPIO14
 | 
			
		||||
  mosi_pin: GPIO13
 | 
			
		||||
  miso_pin: GPIO12
 | 
			
		||||
  clk_pin: GPIO0
 | 
			
		||||
  mosi_pin: GPIO2
 | 
			
		||||
  miso_pin: GPIO16
 | 
			
		||||
  cs_pin: GPIO15
 | 
			
		||||
 | 
			
		||||
packages:
 | 
			
		||||
  spi: !include ../../test_build_components/common/spi/esp8266-ard.yaml
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -4,4 +4,7 @@ substitutions:
 | 
			
		||||
  miso_pin: GPIO4
 | 
			
		||||
  cs_pin: GPIO5
 | 
			
		||||
 | 
			
		||||
packages:
 | 
			
		||||
  spi: !include ../../test_build_components/common/spi/rp2040-ard.yaml
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
 
 | 
			
		||||
@@ -1,4 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  pin: GPIO2
 | 
			
		||||
 | 
			
		||||
<<: !include common-ard-esp32_rmt_led_strip.yaml
 | 
			
		||||
@@ -1,4 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  pin: GPIO2
 | 
			
		||||
 | 
			
		||||
<<: !include common-ard-esp32_rmt_led_strip.yaml
 | 
			
		||||
@@ -1,4 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  pin: GPIO2
 | 
			
		||||
 | 
			
		||||
<<: !include common-ard-fastled.yaml
 | 
			
		||||
@@ -1,11 +1,6 @@
 | 
			
		||||
i2c:
 | 
			
		||||
  - id: i2c_ade7880
 | 
			
		||||
    scl: ${scl_pin}
 | 
			
		||||
    sda: ${sda_pin}
 | 
			
		||||
 | 
			
		||||
sensor:
 | 
			
		||||
  - platform: ade7880
 | 
			
		||||
    i2c_id: i2c_ade7880
 | 
			
		||||
    i2c_id: i2c_bus
 | 
			
		||||
    irq0_pin: ${irq0_pin}
 | 
			
		||||
    irq1_pin: ${irq1_pin}
 | 
			
		||||
    reset_pin: ${reset_pin}
 | 
			
		||||
 
 | 
			
		||||
@@ -1,8 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  scl_pin: GPIO5
 | 
			
		||||
  sda_pin: GPIO4
 | 
			
		||||
  irq0_pin: GPIO13
 | 
			
		||||
  irq1_pin: GPIO15
 | 
			
		||||
  reset_pin: GPIO16
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
@@ -1,8 +0,0 @@
 | 
			
		||||
substitutions:
 | 
			
		||||
  scl_pin: GPIO5
 | 
			
		||||
  sda_pin: GPIO4
 | 
			
		||||
  irq0_pin: GPIO6
 | 
			
		||||
  irq1_pin: GPIO7
 | 
			
		||||
  reset_pin: GPIO10
 | 
			
		||||
 | 
			
		||||
<<: !include common.yaml
 | 
			
		||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user