mirror of
				https://github.com/esphome/esphome.git
				synced 2025-11-04 09:01:49 +00:00 
			
		
		
		
	Compare commits
	
		
			74 Commits
		
	
	
		
			2025.10.0b
			...
			2025.10.3
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
| 
						 | 
					6a478b9070 | ||
| 
						 | 
					a32a1d11fb | ||
| 
						 | 
					daeb8ef88c | ||
| 
						 | 
					febee437d6 | ||
| 
						 | 
					de2f475dbd | ||
| 
						 | 
					ebc0f5f7c9 | ||
| 
						 | 
					87ca8784ef | ||
| 
						 | 
					a186c1062f | ||
| 
						 | 
					ea38237f29 | ||
| 
						 | 
					6aff1394ad | ||
| 
						 | 
					0e34d1b64d | ||
| 
						 | 
					1483cee0fb | ||
| 
						 | 
					8c1bd2fd85 | ||
| 
						 | 
					ea609dc0f6 | ||
| 
						 | 
					913095f6be | ||
| 
						 | 
					bb24ad4a30 | ||
| 
						 | 
					0d612fecfc | ||
| 
						 | 
					9c235b4140 | ||
| 
						 | 
					070b0882b8 | ||
| 
						 | 
					7f1173fcba | ||
| 
						 | 
					a75ccf841c | ||
| 
						 | 
					56eb605ec9 | ||
| 
						 | 
					2c4818de00 | ||
| 
						 | 
					2b94de8732 | ||
| 
						 | 
					f71aed3a5c | ||
| 
						 | 
					353e097085 | ||
| 
						 | 
					9a29dec6d9 | ||
| 
						 | 
					63b113d823 | ||
| 
						 | 
					0381644605 | ||
| 
						 | 
					48a557b005 | ||
| 
						 | 
					780ece73ff | ||
| 
						 | 
					d7fcf8d57b | ||
| 
						 | 
					82a3ca575f | ||
| 
						 | 
					5913da5a89 | ||
| 
						 | 
					8c13105ce1 | ||
| 
						 | 
					c3fd07f8bc | ||
| 
						 | 
					d02ed41eb4 | ||
| 
						 | 
					07504c8208 | ||
| 
						 | 
					b666b8e261 | ||
| 
						 | 
					8627b56e36 | ||
| 
						 | 
					69df07ddcf | ||
| 
						 | 
					13cfa30c67 | ||
| 
						 | 
					da1959ab5d | ||
| 
						 | 
					2b42903e9c | ||
| 
						 | 
					742c9cbb53 | ||
| 
						 | 
					e4bc465a3d | ||
| 
						 | 
					5cec0941f8 | ||
| 
						 | 
					72a7aeb430 | ||
| 
						 | 
					53e6b28092 | ||
| 
						 | 
					7f3c7bb5c6 | ||
| 
						 | 
					c02c0b2a96 | ||
| 
						 | 
					5f5092e29f | ||
| 
						 | 
					2864bf1674 | ||
| 
						 | 
					132e949927 | ||
| 
						 | 
					8fa44e471d | ||
| 
						 | 
					ccedcfb600 | ||
| 
						 | 
					8b0ec0afe3 | ||
| 
						 | 
					dca29ed89b | ||
| 
						 | 
					728726e29e | ||
| 
						 | 
					79f4ca20b8 | ||
| 
						 | 
					3eca72e0b8 | ||
| 
						 | 
					22c0f55cef | ||
| 
						 | 
					fd8ecc9608 | ||
| 
						 | 
					ac96a59d58 | ||
| 
						 | 
					dceed992d8 | ||
| 
						 | 
					b0c66c1c09 | ||
| 
						 | 
					8f04a5b944 | ||
| 
						 | 
					e6c21df30b | ||
| 
						 | 
					842cb9033a | ||
| 
						 | 
					a2cb415dfa | ||
| 
						 | 
					1fac193535 | ||
| 
						 | 
					34632f78cf | ||
| 
						 | 
					b93c60e85a | ||
| 
						 | 
					60dc055509 | 
@@ -186,6 +186,11 @@ This document provides essential context for AI models interacting with this pro
 | 
				
			|||||||
        └── components/[component]/ # Component-specific tests
 | 
					        └── components/[component]/ # Component-specific tests
 | 
				
			||||||
        ```
 | 
					        ```
 | 
				
			||||||
        Run them using `script/test_build_components`. Use `-c <component>` to test specific components and `-t <target>` for specific platforms.
 | 
					        Run them using `script/test_build_components`. Use `-c <component>` to test specific components and `-t <target>` for specific platforms.
 | 
				
			||||||
 | 
					    *   **Testing All Components Together:** To verify that all components can be tested together without ID conflicts or configuration issues, use:
 | 
				
			||||||
 | 
					        ```bash
 | 
				
			||||||
 | 
					        ./script/test_component_grouping.py -e config --all
 | 
				
			||||||
 | 
					        ```
 | 
				
			||||||
 | 
					        This tests all components in a single build to catch conflicts that might not appear when testing components individually. Use `-e config` for fast configuration validation, or `-e compile` for full compilation testing.
 | 
				
			||||||
*   **Debugging and Troubleshooting:**
 | 
					*   **Debugging and Troubleshooting:**
 | 
				
			||||||
    *   **Debug Tools:**
 | 
					    *   **Debug Tools:**
 | 
				
			||||||
        - `esphome config <file>.yaml` to validate configuration.
 | 
					        - `esphome config <file>.yaml` to validate configuration.
 | 
				
			||||||
 
 | 
				
			|||||||
							
								
								
									
										100
									
								
								.github/workflows/ci.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										100
									
								
								.github/workflows/ci.yml
									
									
									
									
										vendored
									
									
								
							@@ -177,6 +177,7 @@ jobs:
 | 
				
			|||||||
      clang-tidy: ${{ steps.determine.outputs.clang-tidy }}
 | 
					      clang-tidy: ${{ steps.determine.outputs.clang-tidy }}
 | 
				
			||||||
      python-linters: ${{ steps.determine.outputs.python-linters }}
 | 
					      python-linters: ${{ steps.determine.outputs.python-linters }}
 | 
				
			||||||
      changed-components: ${{ steps.determine.outputs.changed-components }}
 | 
					      changed-components: ${{ steps.determine.outputs.changed-components }}
 | 
				
			||||||
 | 
					      changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }}
 | 
				
			||||||
      component-test-count: ${{ steps.determine.outputs.component-test-count }}
 | 
					      component-test-count: ${{ steps.determine.outputs.component-test-count }}
 | 
				
			||||||
    steps:
 | 
					    steps:
 | 
				
			||||||
      - name: Check out code from GitHub
 | 
					      - name: Check out code from GitHub
 | 
				
			||||||
@@ -204,6 +205,7 @@ jobs:
 | 
				
			|||||||
          echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT
 | 
					          echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT
 | 
				
			||||||
          echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT
 | 
					          echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT
 | 
				
			||||||
          echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT
 | 
					          echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT
 | 
				
			||||||
 | 
					          echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT
 | 
				
			||||||
          echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT
 | 
					          echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  integration-tests:
 | 
					  integration-tests:
 | 
				
			||||||
@@ -367,12 +369,13 @@ jobs:
 | 
				
			|||||||
      fail-fast: false
 | 
					      fail-fast: false
 | 
				
			||||||
      max-parallel: 2
 | 
					      max-parallel: 2
 | 
				
			||||||
      matrix:
 | 
					      matrix:
 | 
				
			||||||
        file: ${{ fromJson(needs.determine-jobs.outputs.changed-components) }}
 | 
					        file: ${{ fromJson(needs.determine-jobs.outputs.changed-components-with-tests) }}
 | 
				
			||||||
    steps:
 | 
					    steps:
 | 
				
			||||||
      - name: Install dependencies
 | 
					      - name: Cache apt packages
 | 
				
			||||||
        run: |
 | 
					        uses: awalsh128/cache-apt-pkgs-action@acb598e5ddbc6f68a970c5da0688d2f3a9f04d05 # v1.5.3
 | 
				
			||||||
          sudo apt-get update
 | 
					        with:
 | 
				
			||||||
          sudo apt-get install libsdl2-dev
 | 
					          packages: libsdl2-dev
 | 
				
			||||||
 | 
					          version: 1.0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
      - name: Check out code from GitHub
 | 
					      - name: Check out code from GitHub
 | 
				
			||||||
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 | 
					        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 | 
				
			||||||
@@ -381,17 +384,17 @@ jobs:
 | 
				
			|||||||
        with:
 | 
					        with:
 | 
				
			||||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
					          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
				
			||||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
					          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
				
			||||||
      - name: test_build_components -e config -c ${{ matrix.file }}
 | 
					      - name: Validate config for ${{ matrix.file }}
 | 
				
			||||||
        run: |
 | 
					        run: |
 | 
				
			||||||
          . venv/bin/activate
 | 
					          . venv/bin/activate
 | 
				
			||||||
          ./script/test_build_components -e config -c ${{ matrix.file }}
 | 
					          python3 script/test_build_components.py -e config -c ${{ matrix.file }}
 | 
				
			||||||
      - name: test_build_components -e compile -c ${{ matrix.file }}
 | 
					      - name: Compile config for ${{ matrix.file }}
 | 
				
			||||||
        run: |
 | 
					        run: |
 | 
				
			||||||
          . venv/bin/activate
 | 
					          . venv/bin/activate
 | 
				
			||||||
          ./script/test_build_components -e compile -c ${{ matrix.file }}
 | 
					          python3 script/test_build_components.py -e compile -c ${{ matrix.file }}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  test-build-components-splitter:
 | 
					  test-build-components-splitter:
 | 
				
			||||||
    name: Split components for testing into 10 components per group
 | 
					    name: Split components for intelligent grouping (40 weighted per batch)
 | 
				
			||||||
    runs-on: ubuntu-24.04
 | 
					    runs-on: ubuntu-24.04
 | 
				
			||||||
    needs:
 | 
					    needs:
 | 
				
			||||||
      - common
 | 
					      - common
 | 
				
			||||||
@@ -402,14 +405,26 @@ jobs:
 | 
				
			|||||||
    steps:
 | 
					    steps:
 | 
				
			||||||
      - name: Check out code from GitHub
 | 
					      - name: Check out code from GitHub
 | 
				
			||||||
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 | 
					        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 | 
				
			||||||
      - name: Split components into groups of 10
 | 
					      - name: Restore Python
 | 
				
			||||||
 | 
					        uses: ./.github/actions/restore-python
 | 
				
			||||||
 | 
					        with:
 | 
				
			||||||
 | 
					          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
				
			||||||
 | 
					          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
				
			||||||
 | 
					      - name: Split components intelligently based on bus configurations
 | 
				
			||||||
        id: split
 | 
					        id: split
 | 
				
			||||||
        run: |
 | 
					        run: |
 | 
				
			||||||
          components=$(echo '${{ needs.determine-jobs.outputs.changed-components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(10) | join(" ")]')
 | 
					          . venv/bin/activate
 | 
				
			||||||
          echo "components=$components" >> $GITHUB_OUTPUT
 | 
					
 | 
				
			||||||
 | 
					          # Use intelligent splitter that groups components with same bus configs
 | 
				
			||||||
 | 
					          components='${{ needs.determine-jobs.outputs.changed-components-with-tests }}'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					          echo "Splitting components intelligently..."
 | 
				
			||||||
 | 
					          output=$(python3 script/split_components_for_ci.py --components "$components" --batch-size 40 --output github)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					          echo "$output" >> $GITHUB_OUTPUT
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  test-build-components-split:
 | 
					  test-build-components-split:
 | 
				
			||||||
    name: Test split components
 | 
					    name: Test components batch (${{ matrix.components }})
 | 
				
			||||||
    runs-on: ubuntu-24.04
 | 
					    runs-on: ubuntu-24.04
 | 
				
			||||||
    needs:
 | 
					    needs:
 | 
				
			||||||
      - common
 | 
					      - common
 | 
				
			||||||
@@ -418,17 +433,23 @@ jobs:
 | 
				
			|||||||
    if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
 | 
					    if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
 | 
				
			||||||
    strategy:
 | 
					    strategy:
 | 
				
			||||||
      fail-fast: false
 | 
					      fail-fast: false
 | 
				
			||||||
      max-parallel: 4
 | 
					      max-parallel: ${{ (github.base_ref == 'beta' || github.base_ref == 'release') && 8 || 4 }}
 | 
				
			||||||
      matrix:
 | 
					      matrix:
 | 
				
			||||||
        components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
 | 
					        components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
 | 
				
			||||||
    steps:
 | 
					    steps:
 | 
				
			||||||
 | 
					      - name: Show disk space
 | 
				
			||||||
 | 
					        run: |
 | 
				
			||||||
 | 
					          echo "Available disk space:"
 | 
				
			||||||
 | 
					          df -h
 | 
				
			||||||
 | 
					
 | 
				
			||||||
      - name: List components
 | 
					      - name: List components
 | 
				
			||||||
        run: echo ${{ matrix.components }}
 | 
					        run: echo ${{ matrix.components }}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
      - name: Install dependencies
 | 
					      - name: Cache apt packages
 | 
				
			||||||
        run: |
 | 
					        uses: awalsh128/cache-apt-pkgs-action@acb598e5ddbc6f68a970c5da0688d2f3a9f04d05 # v1.5.3
 | 
				
			||||||
          sudo apt-get update
 | 
					        with:
 | 
				
			||||||
          sudo apt-get install libsdl2-dev
 | 
					          packages: libsdl2-dev
 | 
				
			||||||
 | 
					          version: 1.0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
      - name: Check out code from GitHub
 | 
					      - name: Check out code from GitHub
 | 
				
			||||||
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 | 
					        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 | 
				
			||||||
@@ -437,20 +458,37 @@ jobs:
 | 
				
			|||||||
        with:
 | 
					        with:
 | 
				
			||||||
          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
					          python-version: ${{ env.DEFAULT_PYTHON }}
 | 
				
			||||||
          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
					          cache-key: ${{ needs.common.outputs.cache-key }}
 | 
				
			||||||
      - name: Validate config
 | 
					      - name: Validate and compile components with intelligent grouping
 | 
				
			||||||
        run: |
 | 
					        run: |
 | 
				
			||||||
          . venv/bin/activate
 | 
					          . venv/bin/activate
 | 
				
			||||||
          for component in ${{ matrix.components }}; do
 | 
					          # Use /mnt for build files (70GB available vs ~29GB on /)
 | 
				
			||||||
            ./script/test_build_components -e config -c $component
 | 
					          # Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there)
 | 
				
			||||||
          done
 | 
					          sudo mkdir -p /mnt/platformio
 | 
				
			||||||
      - name: Compile config
 | 
					          sudo chown $USER:$USER /mnt/platformio
 | 
				
			||||||
        run: |
 | 
					          mkdir -p ~/.platformio
 | 
				
			||||||
          . venv/bin/activate
 | 
					          sudo mount --bind /mnt/platformio ~/.platformio
 | 
				
			||||||
          mkdir build_cache
 | 
					
 | 
				
			||||||
          export PLATFORMIO_BUILD_CACHE_DIR=$PWD/build_cache
 | 
					          # Bind mount test build directory to /mnt
 | 
				
			||||||
          for component in ${{ matrix.components }}; do
 | 
					          sudo mkdir -p /mnt/test_build_components_build
 | 
				
			||||||
            ./script/test_build_components -e compile -c $component
 | 
					          sudo chown $USER:$USER /mnt/test_build_components_build
 | 
				
			||||||
          done
 | 
					          mkdir -p tests/test_build_components/build
 | 
				
			||||||
 | 
					          sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					          # Convert space-separated components to comma-separated for Python script
 | 
				
			||||||
 | 
					          components_csv=$(echo "${{ matrix.components }}" | tr ' ' ',')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					          echo "Testing components: $components_csv"
 | 
				
			||||||
 | 
					          echo ""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					          # Run config validation with grouping
 | 
				
			||||||
 | 
					          python3 script/test_build_components.py -e config -c "$components_csv" -f
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					          echo ""
 | 
				
			||||||
 | 
					          echo "Config validation passed! Starting compilation..."
 | 
				
			||||||
 | 
					          echo ""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					          # Run compilation with grouping
 | 
				
			||||||
 | 
					          python3 script/test_build_components.py -e compile -c "$components_csv" -f
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  pre-commit-ci-lite:
 | 
					  pre-commit-ci-lite:
 | 
				
			||||||
    name: pre-commit.ci lite
 | 
					    name: pre-commit.ci lite
 | 
				
			||||||
 
 | 
				
			|||||||
							
								
								
									
										2
									
								
								Doxyfile
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								Doxyfile
									
									
									
									
									
								
							@@ -48,7 +48,7 @@ PROJECT_NAME           = ESPHome
 | 
				
			|||||||
# could be handy for archiving the generated documentation or if some version
 | 
					# could be handy for archiving the generated documentation or if some version
 | 
				
			||||||
# control system is used.
 | 
					# control system is used.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
PROJECT_NUMBER         = 2025.10.0b1
 | 
					PROJECT_NUMBER         = 2025.10.3
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# Using the PROJECT_BRIEF tag one can provide an optional one line description
 | 
					# Using the PROJECT_BRIEF tag one can provide an optional one line description
 | 
				
			||||||
# for a project that appears at the top of each page and should give viewer a
 | 
					# for a project that appears at the top of each page and should give viewer a
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -117,6 +117,17 @@ class Purpose(StrEnum):
 | 
				
			|||||||
    LOGGING = "logging"
 | 
					    LOGGING = "logging"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					class PortType(StrEnum):
 | 
				
			||||||
 | 
					    SERIAL = "SERIAL"
 | 
				
			||||||
 | 
					    NETWORK = "NETWORK"
 | 
				
			||||||
 | 
					    MQTT = "MQTT"
 | 
				
			||||||
 | 
					    MQTTIP = "MQTTIP"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Magic MQTT port types that require special handling
 | 
				
			||||||
 | 
					_MQTT_PORT_TYPES = frozenset({PortType.MQTT, PortType.MQTTIP})
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def _resolve_with_cache(address: str, purpose: Purpose) -> list[str]:
 | 
					def _resolve_with_cache(address: str, purpose: Purpose) -> list[str]:
 | 
				
			||||||
    """Resolve an address using cache if available, otherwise return the address itself."""
 | 
					    """Resolve an address using cache if available, otherwise return the address itself."""
 | 
				
			||||||
    if CORE.address_cache and (cached := CORE.address_cache.get_addresses(address)):
 | 
					    if CORE.address_cache and (cached := CORE.address_cache.get_addresses(address)):
 | 
				
			||||||
@@ -174,7 +185,9 @@ def choose_upload_log_host(
 | 
				
			|||||||
            else:
 | 
					            else:
 | 
				
			||||||
                resolved.append(device)
 | 
					                resolved.append(device)
 | 
				
			||||||
        if not resolved:
 | 
					        if not resolved:
 | 
				
			||||||
            _LOGGER.error("All specified devices: %s could not be resolved.", defaults)
 | 
					            raise EsphomeError(
 | 
				
			||||||
 | 
					                f"All specified devices {defaults} could not be resolved. Is the device connected to the network?"
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
        return resolved
 | 
					        return resolved
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # No devices specified, show interactive chooser
 | 
					    # No devices specified, show interactive chooser
 | 
				
			||||||
@@ -268,8 +281,10 @@ def has_ip_address() -> bool:
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def has_resolvable_address() -> bool:
 | 
					def has_resolvable_address() -> bool:
 | 
				
			||||||
    """Check if CORE.address is resolvable (via mDNS or is an IP address)."""
 | 
					    """Check if CORE.address is resolvable (via mDNS, DNS, or is an IP address)."""
 | 
				
			||||||
    return has_mdns() or has_ip_address()
 | 
					    # Any address (IP, mDNS hostname, or regular DNS hostname) is resolvable
 | 
				
			||||||
 | 
					    # The resolve_ip_address() function in helpers.py handles all types via AsyncResolver
 | 
				
			||||||
 | 
					    return CORE.address is not None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def mqtt_get_ip(config: ConfigType, username: str, password: str, client_id: str):
 | 
					def mqtt_get_ip(config: ConfigType, username: str, password: str, client_id: str):
 | 
				
			||||||
@@ -278,16 +293,67 @@ def mqtt_get_ip(config: ConfigType, username: str, password: str, client_id: str
 | 
				
			|||||||
    return mqtt.get_esphome_device_ip(config, username, password, client_id)
 | 
					    return mqtt.get_esphome_device_ip(config, username, password, client_id)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
_PORT_TO_PORT_TYPE = {
 | 
					def _resolve_network_devices(
 | 
				
			||||||
    "MQTT": "MQTT",
 | 
					    devices: list[str], config: ConfigType, args: ArgsProtocol
 | 
				
			||||||
    "MQTTIP": "MQTTIP",
 | 
					) -> list[str]:
 | 
				
			||||||
}
 | 
					    """Resolve device list, converting MQTT magic strings to actual IP addresses.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    This function filters the devices list to:
 | 
				
			||||||
 | 
					    - Replace MQTT/MQTTIP magic strings with actual IP addresses via MQTT lookup
 | 
				
			||||||
 | 
					    - Deduplicate addresses while preserving order
 | 
				
			||||||
 | 
					    - Only resolve MQTT once even if multiple MQTT strings are present
 | 
				
			||||||
 | 
					    - If MQTT resolution fails, log a warning and continue with other devices
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        devices: List of device identifiers (IPs, hostnames, or magic strings)
 | 
				
			||||||
 | 
					        config: ESPHome configuration
 | 
				
			||||||
 | 
					        args: Command-line arguments containing MQTT credentials
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        List of network addresses suitable for connection attempts
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    network_devices: list[str] = []
 | 
				
			||||||
 | 
					    mqtt_resolved: bool = False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    for device in devices:
 | 
				
			||||||
 | 
					        port_type = get_port_type(device)
 | 
				
			||||||
 | 
					        if port_type in _MQTT_PORT_TYPES:
 | 
				
			||||||
 | 
					            # Only resolve MQTT once, even if multiple MQTT entries
 | 
				
			||||||
 | 
					            if not mqtt_resolved:
 | 
				
			||||||
 | 
					                try:
 | 
				
			||||||
 | 
					                    mqtt_ips = mqtt_get_ip(
 | 
				
			||||||
 | 
					                        config, args.username, args.password, args.client_id
 | 
				
			||||||
 | 
					                    )
 | 
				
			||||||
 | 
					                    network_devices.extend(mqtt_ips)
 | 
				
			||||||
 | 
					                except EsphomeError as err:
 | 
				
			||||||
 | 
					                    _LOGGER.warning(
 | 
				
			||||||
 | 
					                        "MQTT IP discovery failed (%s), will try other devices if available",
 | 
				
			||||||
 | 
					                        err,
 | 
				
			||||||
 | 
					                    )
 | 
				
			||||||
 | 
					                mqtt_resolved = True
 | 
				
			||||||
 | 
					        elif device not in network_devices:
 | 
				
			||||||
 | 
					            # Regular network address or IP - add if not already present
 | 
				
			||||||
 | 
					            network_devices.append(device)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return network_devices
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def get_port_type(port: str) -> str:
 | 
					def get_port_type(port: str) -> PortType:
 | 
				
			||||||
 | 
					    """Determine the type of port/device identifier.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        PortType.SERIAL for serial ports (/dev/ttyUSB0, COM1, etc.)
 | 
				
			||||||
 | 
					        PortType.MQTT for MQTT logging
 | 
				
			||||||
 | 
					        PortType.MQTTIP for MQTT IP lookup
 | 
				
			||||||
 | 
					        PortType.NETWORK for IP addresses, hostnames, or mDNS names
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
    if port.startswith("/") or port.startswith("COM"):
 | 
					    if port.startswith("/") or port.startswith("COM"):
 | 
				
			||||||
        return "SERIAL"
 | 
					        return PortType.SERIAL
 | 
				
			||||||
    return _PORT_TO_PORT_TYPE.get(port, "NETWORK")
 | 
					    if port == "MQTT":
 | 
				
			||||||
 | 
					        return PortType.MQTT
 | 
				
			||||||
 | 
					    if port == "MQTTIP":
 | 
				
			||||||
 | 
					        return PortType.MQTTIP
 | 
				
			||||||
 | 
					    return PortType.NETWORK
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def run_miniterm(config: ConfigType, port: str, args) -> int:
 | 
					def run_miniterm(config: ConfigType, port: str, args) -> int:
 | 
				
			||||||
@@ -487,7 +553,7 @@ def upload_using_platformio(config: ConfigType, port: str):
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def check_permissions(port: str):
 | 
					def check_permissions(port: str):
 | 
				
			||||||
    if os.name == "posix" and get_port_type(port) == "SERIAL":
 | 
					    if os.name == "posix" and get_port_type(port) == PortType.SERIAL:
 | 
				
			||||||
        # Check if we can open selected serial port
 | 
					        # Check if we can open selected serial port
 | 
				
			||||||
        if not os.access(port, os.F_OK):
 | 
					        if not os.access(port, os.F_OK):
 | 
				
			||||||
            raise EsphomeError(
 | 
					            raise EsphomeError(
 | 
				
			||||||
@@ -515,7 +581,7 @@ def upload_program(
 | 
				
			|||||||
    except AttributeError:
 | 
					    except AttributeError:
 | 
				
			||||||
        pass
 | 
					        pass
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    if get_port_type(host) == "SERIAL":
 | 
					    if get_port_type(host) == PortType.SERIAL:
 | 
				
			||||||
        check_permissions(host)
 | 
					        check_permissions(host)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        exit_code = 1
 | 
					        exit_code = 1
 | 
				
			||||||
@@ -542,17 +608,16 @@ def upload_program(
 | 
				
			|||||||
    from esphome import espota2
 | 
					    from esphome import espota2
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    remote_port = int(ota_conf[CONF_PORT])
 | 
					    remote_port = int(ota_conf[CONF_PORT])
 | 
				
			||||||
    password = ota_conf.get(CONF_PASSWORD, "")
 | 
					    password = ota_conf.get(CONF_PASSWORD)
 | 
				
			||||||
    if getattr(args, "file", None) is not None:
 | 
					    if getattr(args, "file", None) is not None:
 | 
				
			||||||
        binary = Path(args.file)
 | 
					        binary = Path(args.file)
 | 
				
			||||||
    else:
 | 
					    else:
 | 
				
			||||||
        binary = CORE.firmware_bin
 | 
					        binary = CORE.firmware_bin
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # MQTT address resolution
 | 
					    # Resolve MQTT magic strings to actual IP addresses
 | 
				
			||||||
    if get_port_type(host) in ("MQTT", "MQTTIP"):
 | 
					    network_devices = _resolve_network_devices(devices, config, args)
 | 
				
			||||||
        devices = mqtt_get_ip(config, args.username, args.password, args.client_id)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    return espota2.run_ota(devices, remote_port, password, binary)
 | 
					    return espota2.run_ota(network_devices, remote_port, password, binary)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def show_logs(config: ConfigType, args: ArgsProtocol, devices: list[str]) -> int | None:
 | 
					def show_logs(config: ConfigType, args: ArgsProtocol, devices: list[str]) -> int | None:
 | 
				
			||||||
@@ -567,32 +632,22 @@ def show_logs(config: ConfigType, args: ArgsProtocol, devices: list[str]) -> int
 | 
				
			|||||||
        raise EsphomeError("Logger is not configured!")
 | 
					        raise EsphomeError("Logger is not configured!")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    port = devices[0]
 | 
					    port = devices[0]
 | 
				
			||||||
 | 
					    port_type = get_port_type(port)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    if get_port_type(port) == "SERIAL":
 | 
					    if port_type == PortType.SERIAL:
 | 
				
			||||||
        check_permissions(port)
 | 
					        check_permissions(port)
 | 
				
			||||||
        return run_miniterm(config, port, args)
 | 
					        return run_miniterm(config, port, args)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    port_type = get_port_type(port)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # Check if we should use API for logging
 | 
					    # Check if we should use API for logging
 | 
				
			||||||
    if has_api():
 | 
					    # Resolve MQTT magic strings to actual IP addresses
 | 
				
			||||||
        addresses_to_use: list[str] | None = None
 | 
					    if has_api() and (
 | 
				
			||||||
 | 
					        network_devices := _resolve_network_devices(devices, config, args)
 | 
				
			||||||
 | 
					    ):
 | 
				
			||||||
 | 
					        from esphome.components.api.client import run_logs
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if port_type == "NETWORK" and (has_mdns() or is_ip_address(port)):
 | 
					        return run_logs(config, network_devices)
 | 
				
			||||||
            addresses_to_use = devices
 | 
					 | 
				
			||||||
        elif port_type in ("NETWORK", "MQTT", "MQTTIP") and has_mqtt_ip_lookup():
 | 
					 | 
				
			||||||
            # Only use MQTT IP lookup if the first condition didn't match
 | 
					 | 
				
			||||||
            # (for MQTT/MQTTIP types, or for NETWORK when mdns/ip check fails)
 | 
					 | 
				
			||||||
            addresses_to_use = mqtt_get_ip(
 | 
					 | 
				
			||||||
                config, args.username, args.password, args.client_id
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if addresses_to_use is not None:
 | 
					    if port_type in (PortType.NETWORK, PortType.MQTT) and has_mqtt_logging():
 | 
				
			||||||
            from esphome.components.api.client import run_logs
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            return run_logs(config, addresses_to_use)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    if port_type in ("NETWORK", "MQTT") and has_mqtt_logging():
 | 
					 | 
				
			||||||
        from esphome import mqtt
 | 
					        from esphome import mqtt
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return mqtt.show_logs(
 | 
					        return mqtt.show_logs(
 | 
				
			||||||
@@ -1002,6 +1057,12 @@ def parse_args(argv):
 | 
				
			|||||||
        action="append",
 | 
					        action="append",
 | 
				
			||||||
        default=[],
 | 
					        default=[],
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
 | 
					    options_parser.add_argument(
 | 
				
			||||||
 | 
					        "--testing-mode",
 | 
				
			||||||
 | 
					        help="Enable testing mode (disables validation checks for grouped component testing)",
 | 
				
			||||||
 | 
					        action="store_true",
 | 
				
			||||||
 | 
					        default=False,
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    parser = argparse.ArgumentParser(
 | 
					    parser = argparse.ArgumentParser(
 | 
				
			||||||
        description=f"ESPHome {const.__version__}", parents=[options_parser]
 | 
					        description=f"ESPHome {const.__version__}", parents=[options_parser]
 | 
				
			||||||
@@ -1260,6 +1321,7 @@ def run_esphome(argv):
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
    args = parse_args(argv)
 | 
					    args = parse_args(argv)
 | 
				
			||||||
    CORE.dashboard = args.dashboard
 | 
					    CORE.dashboard = args.dashboard
 | 
				
			||||||
 | 
					    CORE.testing_mode = args.testing_mode
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # Create address cache from command-line arguments
 | 
					    # Create address cache from command-line arguments
 | 
				
			||||||
    CORE.address_cache = AddressCache.from_cli_args(
 | 
					    CORE.address_cache = AddressCache.from_cli_args(
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -41,7 +41,7 @@ CONFIG_SCHEMA = cv.All(
 | 
				
			|||||||
    cv.Schema(
 | 
					    cv.Schema(
 | 
				
			||||||
        {
 | 
					        {
 | 
				
			||||||
            cv.GenerateID(): cv.declare_id(BME680BSECComponent),
 | 
					            cv.GenerateID(): cv.declare_id(BME680BSECComponent),
 | 
				
			||||||
            cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature,
 | 
					            cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta,
 | 
				
			||||||
            cv.Optional(CONF_IAQ_MODE, default="STATIC"): cv.enum(
 | 
					            cv.Optional(CONF_IAQ_MODE, default="STATIC"): cv.enum(
 | 
				
			||||||
                IAQ_MODE_OPTIONS, upper=True
 | 
					                IAQ_MODE_OPTIONS, upper=True
 | 
				
			||||||
            ),
 | 
					            ),
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -139,7 +139,7 @@ CONFIG_SCHEMA_BASE = (
 | 
				
			|||||||
            cv.Optional(CONF_SUPPLY_VOLTAGE, default="3.3V"): cv.enum(
 | 
					            cv.Optional(CONF_SUPPLY_VOLTAGE, default="3.3V"): cv.enum(
 | 
				
			||||||
                VOLTAGE_OPTIONS, upper=True
 | 
					                VOLTAGE_OPTIONS, upper=True
 | 
				
			||||||
            ),
 | 
					            ),
 | 
				
			||||||
            cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature,
 | 
					            cv.Optional(CONF_TEMPERATURE_OFFSET, default=0): cv.temperature_delta,
 | 
				
			||||||
            cv.Optional(
 | 
					            cv.Optional(
 | 
				
			||||||
                CONF_STATE_SAVE_INTERVAL, default="6hours"
 | 
					                CONF_STATE_SAVE_INTERVAL, default="6hours"
 | 
				
			||||||
            ): cv.positive_time_period_minutes,
 | 
					            ): cv.positive_time_period_minutes,
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -105,9 +105,9 @@ class Canbus : public Component {
 | 
				
			|||||||
  CallbackManager<void(uint32_t can_id, bool extended_id, bool rtr, const std::vector<uint8_t> &data)>
 | 
					  CallbackManager<void(uint32_t can_id, bool extended_id, bool rtr, const std::vector<uint8_t> &data)>
 | 
				
			||||||
      callback_manager_{};
 | 
					      callback_manager_{};
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  virtual bool setup_internal();
 | 
					  virtual bool setup_internal() = 0;
 | 
				
			||||||
  virtual Error send_message(struct CanFrame *frame);
 | 
					  virtual Error send_message(struct CanFrame *frame) = 0;
 | 
				
			||||||
  virtual Error read_message(struct CanFrame *frame);
 | 
					  virtual Error read_message(struct CanFrame *frame) = 0;
 | 
				
			||||||
};
 | 
					};
 | 
				
			||||||
 | 
					
 | 
				
			||||||
template<typename... Ts> class CanbusSendAction : public Action<Ts...>, public Parented<Canbus> {
 | 
					template<typename... Ts> class CanbusSendAction : public Action<Ts...>, public Parented<Canbus> {
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -5,7 +5,7 @@ namespace dashboard_import {
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
static std::string g_package_import_url;  // NOLINT
 | 
					static std::string g_package_import_url;  // NOLINT
 | 
				
			||||||
 | 
					
 | 
				
			||||||
std::string get_package_import_url() { return g_package_import_url; }
 | 
					const std::string &get_package_import_url() { return g_package_import_url; }
 | 
				
			||||||
void set_package_import_url(std::string url) { g_package_import_url = std::move(url); }
 | 
					void set_package_import_url(std::string url) { g_package_import_url = std::move(url); }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
}  // namespace dashboard_import
 | 
					}  // namespace dashboard_import
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -5,7 +5,7 @@
 | 
				
			|||||||
namespace esphome {
 | 
					namespace esphome {
 | 
				
			||||||
namespace dashboard_import {
 | 
					namespace dashboard_import {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
std::string get_package_import_url();
 | 
					const std::string &get_package_import_url();
 | 
				
			||||||
void set_package_import_url(std::string url);
 | 
					void set_package_import_url(std::string url);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
}  // namespace dashboard_import
 | 
					}  // namespace dashboard_import
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -30,14 +30,12 @@ class DateTimeBase : public EntityBase {
 | 
				
			|||||||
#endif
 | 
					#endif
 | 
				
			||||||
};
 | 
					};
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef USE_TIME
 | 
					 | 
				
			||||||
class DateTimeStateTrigger : public Trigger<ESPTime> {
 | 
					class DateTimeStateTrigger : public Trigger<ESPTime> {
 | 
				
			||||||
 public:
 | 
					 public:
 | 
				
			||||||
  explicit DateTimeStateTrigger(DateTimeBase *parent) {
 | 
					  explicit DateTimeStateTrigger(DateTimeBase *parent) {
 | 
				
			||||||
    parent->add_on_state_callback([this, parent]() { this->trigger(parent->state_as_esptime()); });
 | 
					    parent->add_on_state_callback([this, parent]() { this->trigger(parent->state_as_esptime()); });
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
};
 | 
					};
 | 
				
			||||||
#endif
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
}  // namespace datetime
 | 
					}  // namespace datetime
 | 
				
			||||||
}  // namespace esphome
 | 
					}  // namespace esphome
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -775,7 +775,7 @@ void Display::test_card() {
 | 
				
			|||||||
    int shift_y = (h - image_h) / 2;
 | 
					    int shift_y = (h - image_h) / 2;
 | 
				
			||||||
    int line_w = (image_w - 6) / 6;
 | 
					    int line_w = (image_w - 6) / 6;
 | 
				
			||||||
    int image_c = image_w / 2;
 | 
					    int image_c = image_w / 2;
 | 
				
			||||||
    for (auto i = 0; i <= image_h; i++) {
 | 
					    for (auto i = 0; i != image_h; i++) {
 | 
				
			||||||
      int c = esp_scale(i, image_h);
 | 
					      int c = esp_scale(i, image_h);
 | 
				
			||||||
      this->horizontal_line(shift_x + 0, shift_y + i, line_w, r.fade_to_white(c));
 | 
					      this->horizontal_line(shift_x + 0, shift_y + i, line_w, r.fade_to_white(c));
 | 
				
			||||||
      this->horizontal_line(shift_x + line_w, shift_y + i, line_w, r.fade_to_black(c));  //
 | 
					      this->horizontal_line(shift_x + line_w, shift_y + i, line_w, r.fade_to_black(c));  //
 | 
				
			||||||
@@ -809,8 +809,11 @@ void Display::test_card() {
 | 
				
			|||||||
      }
 | 
					      }
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
  this->rectangle(0, 0, w, h, Color(127, 0, 127));
 | 
					 | 
				
			||||||
  this->filled_rectangle(0, 0, 10, 10, Color(255, 0, 255));
 | 
					  this->filled_rectangle(0, 0, 10, 10, Color(255, 0, 255));
 | 
				
			||||||
 | 
					  this->filled_rectangle(w - 10, 0, 10, 10, Color(255, 0, 255));
 | 
				
			||||||
 | 
					  this->filled_rectangle(0, h - 10, 10, 10, Color(255, 0, 255));
 | 
				
			||||||
 | 
					  this->filled_rectangle(w - 10, h - 10, 10, 10, Color(255, 0, 255));
 | 
				
			||||||
 | 
					  this->rectangle(0, 0, w, h, Color(255, 255, 255));
 | 
				
			||||||
  this->stop_poller();
 | 
					  this->stop_poller();
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -314,11 +314,12 @@ def _format_framework_espidf_version(ver: cv.Version, release: str) -> str:
 | 
				
			|||||||
#  - https://github.com/espressif/arduino-esp32/releases
 | 
					#  - https://github.com/espressif/arduino-esp32/releases
 | 
				
			||||||
ARDUINO_FRAMEWORK_VERSION_LOOKUP = {
 | 
					ARDUINO_FRAMEWORK_VERSION_LOOKUP = {
 | 
				
			||||||
    "recommended": cv.Version(3, 2, 1),
 | 
					    "recommended": cv.Version(3, 2, 1),
 | 
				
			||||||
    "latest": cv.Version(3, 3, 1),
 | 
					    "latest": cv.Version(3, 3, 2),
 | 
				
			||||||
    "dev": cv.Version(3, 3, 1),
 | 
					    "dev": cv.Version(3, 3, 2),
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
ARDUINO_PLATFORM_VERSION_LOOKUP = {
 | 
					ARDUINO_PLATFORM_VERSION_LOOKUP = {
 | 
				
			||||||
    cv.Version(3, 3, 1): cv.Version(55, 3, 31),
 | 
					    cv.Version(3, 3, 2): cv.Version(55, 3, 31, "1"),
 | 
				
			||||||
 | 
					    cv.Version(3, 3, 1): cv.Version(55, 3, 31, "1"),
 | 
				
			||||||
    cv.Version(3, 3, 0): cv.Version(55, 3, 30, "2"),
 | 
					    cv.Version(3, 3, 0): cv.Version(55, 3, 30, "2"),
 | 
				
			||||||
    cv.Version(3, 2, 1): cv.Version(54, 3, 21, "2"),
 | 
					    cv.Version(3, 2, 1): cv.Version(54, 3, 21, "2"),
 | 
				
			||||||
    cv.Version(3, 2, 0): cv.Version(54, 3, 20),
 | 
					    cv.Version(3, 2, 0): cv.Version(54, 3, 20),
 | 
				
			||||||
@@ -336,8 +337,8 @@ ESP_IDF_FRAMEWORK_VERSION_LOOKUP = {
 | 
				
			|||||||
    "dev": cv.Version(5, 5, 1),
 | 
					    "dev": cv.Version(5, 5, 1),
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
ESP_IDF_PLATFORM_VERSION_LOOKUP = {
 | 
					ESP_IDF_PLATFORM_VERSION_LOOKUP = {
 | 
				
			||||||
    cv.Version(5, 5, 1): cv.Version(55, 3, 31),
 | 
					    cv.Version(5, 5, 1): cv.Version(55, 3, 31, "1"),
 | 
				
			||||||
    cv.Version(5, 5, 0): cv.Version(55, 3, 31),
 | 
					    cv.Version(5, 5, 0): cv.Version(55, 3, 31, "1"),
 | 
				
			||||||
    cv.Version(5, 4, 2): cv.Version(54, 3, 21, "2"),
 | 
					    cv.Version(5, 4, 2): cv.Version(54, 3, 21, "2"),
 | 
				
			||||||
    cv.Version(5, 4, 1): cv.Version(54, 3, 21, "2"),
 | 
					    cv.Version(5, 4, 1): cv.Version(54, 3, 21, "2"),
 | 
				
			||||||
    cv.Version(5, 4, 0): cv.Version(54, 3, 21, "2"),
 | 
					    cv.Version(5, 4, 0): cv.Version(54, 3, 21, "2"),
 | 
				
			||||||
@@ -352,8 +353,8 @@ ESP_IDF_PLATFORM_VERSION_LOOKUP = {
 | 
				
			|||||||
#  - https://github.com/pioarduino/platform-espressif32/releases
 | 
					#  - https://github.com/pioarduino/platform-espressif32/releases
 | 
				
			||||||
PLATFORM_VERSION_LOOKUP = {
 | 
					PLATFORM_VERSION_LOOKUP = {
 | 
				
			||||||
    "recommended": cv.Version(54, 3, 21, "2"),
 | 
					    "recommended": cv.Version(54, 3, 21, "2"),
 | 
				
			||||||
    "latest": cv.Version(55, 3, 31),
 | 
					    "latest": cv.Version(55, 3, 31, "1"),
 | 
				
			||||||
    "dev": "https://github.com/pioarduino/platform-espressif32.git#develop",
 | 
					    "dev": cv.Version(55, 3, 31, "1"),
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -645,6 +646,7 @@ def _show_framework_migration_message(name: str, variant: str) -> None:
 | 
				
			|||||||
        + "Why change? ESP-IDF offers:\n"
 | 
					        + "Why change? ESP-IDF offers:\n"
 | 
				
			||||||
        + color(AnsiFore.GREEN, "  ✨ Up to 40% smaller binaries\n")
 | 
					        + color(AnsiFore.GREEN, "  ✨ Up to 40% smaller binaries\n")
 | 
				
			||||||
        + color(AnsiFore.GREEN, "  🚀 Better performance and optimization\n")
 | 
					        + color(AnsiFore.GREEN, "  🚀 Better performance and optimization\n")
 | 
				
			||||||
 | 
					        + color(AnsiFore.GREEN, "  ⚡ 2-3x faster compile times\n")
 | 
				
			||||||
        + color(AnsiFore.GREEN, "  📦 Custom-built firmware for your exact needs\n")
 | 
					        + color(AnsiFore.GREEN, "  📦 Custom-built firmware for your exact needs\n")
 | 
				
			||||||
        + color(
 | 
					        + color(
 | 
				
			||||||
            AnsiFore.GREEN,
 | 
					            AnsiFore.GREEN,
 | 
				
			||||||
@@ -652,7 +654,6 @@ def _show_framework_migration_message(name: str, variant: str) -> None:
 | 
				
			|||||||
        )
 | 
					        )
 | 
				
			||||||
        + "\n"
 | 
					        + "\n"
 | 
				
			||||||
        + "Trade-offs:\n"
 | 
					        + "Trade-offs:\n"
 | 
				
			||||||
        + color(AnsiFore.YELLOW, "  ⏱️  Compile times are ~25% longer\n")
 | 
					 | 
				
			||||||
        + color(AnsiFore.YELLOW, "  🔄 Some components need migration\n")
 | 
					        + color(AnsiFore.YELLOW, "  🔄 Some components need migration\n")
 | 
				
			||||||
        + "\n"
 | 
					        + "\n"
 | 
				
			||||||
        + "What should I do?\n"
 | 
					        + "What should I do?\n"
 | 
				
			||||||
@@ -789,6 +790,7 @@ async def to_code(config):
 | 
				
			|||||||
        add_idf_sdkconfig_option("CONFIG_AUTOSTART_ARDUINO", True)
 | 
					        add_idf_sdkconfig_option("CONFIG_AUTOSTART_ARDUINO", True)
 | 
				
			||||||
        add_idf_sdkconfig_option("CONFIG_MBEDTLS_PSK_MODES", True)
 | 
					        add_idf_sdkconfig_option("CONFIG_MBEDTLS_PSK_MODES", True)
 | 
				
			||||||
        add_idf_sdkconfig_option("CONFIG_MBEDTLS_CERTIFICATE_BUNDLE", True)
 | 
					        add_idf_sdkconfig_option("CONFIG_MBEDTLS_CERTIFICATE_BUNDLE", True)
 | 
				
			||||||
 | 
					        add_idf_sdkconfig_option("CONFIG_ESP_PHY_REDUCE_TX_POWER", True)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    cg.add_build_flag("-Wno-nonnull-compare")
 | 
					    cg.add_build_flag("-Wno-nonnull-compare")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -6,6 +6,7 @@
 | 
				
			|||||||
#include <freertos/FreeRTOS.h>
 | 
					#include <freertos/FreeRTOS.h>
 | 
				
			||||||
#include <freertos/task.h>
 | 
					#include <freertos/task.h>
 | 
				
			||||||
#include <esp_idf_version.h>
 | 
					#include <esp_idf_version.h>
 | 
				
			||||||
 | 
					#include <esp_ota_ops.h>
 | 
				
			||||||
#include <esp_task_wdt.h>
 | 
					#include <esp_task_wdt.h>
 | 
				
			||||||
#include <esp_timer.h>
 | 
					#include <esp_timer.h>
 | 
				
			||||||
#include <soc/rtc.h>
 | 
					#include <soc/rtc.h>
 | 
				
			||||||
@@ -52,6 +53,16 @@ void arch_init() {
 | 
				
			|||||||
  disableCore1WDT();
 | 
					  disableCore1WDT();
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					  // If the bootloader was compiled with CONFIG_BOOTLOADER_APP_ROLLBACK_ENABLE the current
 | 
				
			||||||
 | 
					  // partition will get rolled back unless it is marked as valid.
 | 
				
			||||||
 | 
					  esp_ota_img_states_t state;
 | 
				
			||||||
 | 
					  const esp_partition_t *running = esp_ota_get_running_partition();
 | 
				
			||||||
 | 
					  if (esp_ota_get_state_partition(running, &state) == ESP_OK) {
 | 
				
			||||||
 | 
					    if (state == ESP_OTA_IMG_PENDING_VERIFY) {
 | 
				
			||||||
 | 
					      esp_ota_mark_app_valid_cancel_rollback();
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					  }
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
void IRAM_ATTR HOT arch_feed_wdt() { esp_task_wdt_reset(); }
 | 
					void IRAM_ATTR HOT arch_feed_wdt() { esp_task_wdt_reset(); }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -285,6 +285,10 @@ def consume_connection_slots(
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
def validate_connection_slots(max_connections: int) -> None:
 | 
					def validate_connection_slots(max_connections: int) -> None:
 | 
				
			||||||
    """Validate that BLE connection slots don't exceed the configured maximum."""
 | 
					    """Validate that BLE connection slots don't exceed the configured maximum."""
 | 
				
			||||||
 | 
					    # Skip validation in testing mode to allow component grouping
 | 
				
			||||||
 | 
					    if CORE.testing_mode:
 | 
				
			||||||
 | 
					        return
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ble_data = CORE.data.get(KEY_ESP32_BLE, {})
 | 
					    ble_data = CORE.data.get(KEY_ESP32_BLE, {})
 | 
				
			||||||
    used_slots = ble_data.get(KEY_USED_CONNECTION_SLOTS, [])
 | 
					    used_slots = ble_data.get(KEY_USED_CONNECTION_SLOTS, [])
 | 
				
			||||||
    num_used = len(used_slots)
 | 
					    num_used = len(used_slots)
 | 
				
			||||||
@@ -332,12 +336,16 @@ def final_validation(config):
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
    # Check if BLE Server is needed
 | 
					    # Check if BLE Server is needed
 | 
				
			||||||
    has_ble_server = "esp32_ble_server" in full_config
 | 
					    has_ble_server = "esp32_ble_server" in full_config
 | 
				
			||||||
    add_idf_sdkconfig_option("CONFIG_BT_GATTS_ENABLE", has_ble_server)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # Check if BLE Client is needed (via esp32_ble_tracker or esp32_ble_client)
 | 
					    # Check if BLE Client is needed (via esp32_ble_tracker or esp32_ble_client)
 | 
				
			||||||
    has_ble_client = (
 | 
					    has_ble_client = (
 | 
				
			||||||
        "esp32_ble_tracker" in full_config or "esp32_ble_client" in full_config
 | 
					        "esp32_ble_tracker" in full_config or "esp32_ble_client" in full_config
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # ESP-IDF BLE stack requires GATT Server to be enabled when GATT Client is enabled
 | 
				
			||||||
 | 
					    # This is an internal dependency in the Bluedroid stack (tested ESP-IDF 5.4.2-5.5.1)
 | 
				
			||||||
 | 
					    # See: https://github.com/espressif/esp-idf/issues/17724
 | 
				
			||||||
 | 
					    add_idf_sdkconfig_option("CONFIG_BT_GATTS_ENABLE", has_ble_server or has_ble_client)
 | 
				
			||||||
    add_idf_sdkconfig_option("CONFIG_BT_GATTC_ENABLE", has_ble_client)
 | 
					    add_idf_sdkconfig_option("CONFIG_BT_GATTC_ENABLE", has_ble_client)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # Handle max_connections: check for deprecated location in esp32_ble_tracker
 | 
					    # Handle max_connections: check for deprecated location in esp32_ble_tracker
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -14,10 +14,6 @@
 | 
				
			|||||||
#include "esphome/core/hal.h"
 | 
					#include "esphome/core/hal.h"
 | 
				
			||||||
#include "esphome/core/helpers.h"
 | 
					#include "esphome/core/helpers.h"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef USE_ARDUINO
 | 
					 | 
				
			||||||
#include <esp32-hal-bt.h>
 | 
					 | 
				
			||||||
#endif
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
namespace esphome {
 | 
					namespace esphome {
 | 
				
			||||||
namespace esp32_ble_beacon {
 | 
					namespace esp32_ble_beacon {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -25,10 +25,6 @@
 | 
				
			|||||||
#include <esp_coexist.h>
 | 
					#include <esp_coexist.h>
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef USE_ARDUINO
 | 
					 | 
				
			||||||
#include <esp32-hal-bt.h>
 | 
					 | 
				
			||||||
#endif
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
#define MBEDTLS_AES_ALT
 | 
					#define MBEDTLS_AES_ALT
 | 
				
			||||||
#include <aes_alt.h>
 | 
					#include <aes_alt.h>
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -143,6 +143,7 @@ void ESP32ImprovComponent::loop() {
 | 
				
			|||||||
#else
 | 
					#else
 | 
				
			||||||
      this->set_state_(improv::STATE_AUTHORIZED);
 | 
					      this->set_state_(improv::STATE_AUTHORIZED);
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
 | 
					      this->check_wifi_connection_();
 | 
				
			||||||
      break;
 | 
					      break;
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
    case improv::STATE_AUTHORIZED: {
 | 
					    case improv::STATE_AUTHORIZED: {
 | 
				
			||||||
@@ -156,31 +157,12 @@ void ESP32ImprovComponent::loop() {
 | 
				
			|||||||
      if (!this->check_identify_()) {
 | 
					      if (!this->check_identify_()) {
 | 
				
			||||||
        this->set_status_indicator_state_((now % 1000) < 500);
 | 
					        this->set_status_indicator_state_((now % 1000) < 500);
 | 
				
			||||||
      }
 | 
					      }
 | 
				
			||||||
 | 
					      this->check_wifi_connection_();
 | 
				
			||||||
      break;
 | 
					      break;
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
    case improv::STATE_PROVISIONING: {
 | 
					    case improv::STATE_PROVISIONING: {
 | 
				
			||||||
      this->set_status_indicator_state_((now % 200) < 100);
 | 
					      this->set_status_indicator_state_((now % 200) < 100);
 | 
				
			||||||
      if (wifi::global_wifi_component->is_connected()) {
 | 
					      this->check_wifi_connection_();
 | 
				
			||||||
        wifi::global_wifi_component->save_wifi_sta(this->connecting_sta_.get_ssid(),
 | 
					 | 
				
			||||||
                                                   this->connecting_sta_.get_password());
 | 
					 | 
				
			||||||
        this->connecting_sta_ = {};
 | 
					 | 
				
			||||||
        this->cancel_timeout("wifi-connect-timeout");
 | 
					 | 
				
			||||||
        this->set_state_(improv::STATE_PROVISIONED);
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        std::vector<std::string> urls = {ESPHOME_MY_LINK};
 | 
					 | 
				
			||||||
#ifdef USE_WEBSERVER
 | 
					 | 
				
			||||||
        for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) {
 | 
					 | 
				
			||||||
          if (ip.is_ip4()) {
 | 
					 | 
				
			||||||
            std::string webserver_url = "http://" + ip.str() + ":" + to_string(USE_WEBSERVER_PORT);
 | 
					 | 
				
			||||||
            urls.push_back(webserver_url);
 | 
					 | 
				
			||||||
            break;
 | 
					 | 
				
			||||||
          }
 | 
					 | 
				
			||||||
        }
 | 
					 | 
				
			||||||
#endif
 | 
					 | 
				
			||||||
        std::vector<uint8_t> data = improv::build_rpc_response(improv::WIFI_SETTINGS, urls);
 | 
					 | 
				
			||||||
        this->send_response_(data);
 | 
					 | 
				
			||||||
        this->stop();
 | 
					 | 
				
			||||||
      }
 | 
					 | 
				
			||||||
      break;
 | 
					      break;
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
    case improv::STATE_PROVISIONED: {
 | 
					    case improv::STATE_PROVISIONED: {
 | 
				
			||||||
@@ -392,6 +374,36 @@ void ESP32ImprovComponent::on_wifi_connect_timeout_() {
 | 
				
			|||||||
  wifi::global_wifi_component->clear_sta();
 | 
					  wifi::global_wifi_component->clear_sta();
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					void ESP32ImprovComponent::check_wifi_connection_() {
 | 
				
			||||||
 | 
					  if (!wifi::global_wifi_component->is_connected()) {
 | 
				
			||||||
 | 
					    return;
 | 
				
			||||||
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					  if (this->state_ == improv::STATE_PROVISIONING) {
 | 
				
			||||||
 | 
					    wifi::global_wifi_component->save_wifi_sta(this->connecting_sta_.get_ssid(), this->connecting_sta_.get_password());
 | 
				
			||||||
 | 
					    this->connecting_sta_ = {};
 | 
				
			||||||
 | 
					    this->cancel_timeout("wifi-connect-timeout");
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    std::vector<std::string> urls = {ESPHOME_MY_LINK};
 | 
				
			||||||
 | 
					#ifdef USE_WEBSERVER
 | 
				
			||||||
 | 
					    for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) {
 | 
				
			||||||
 | 
					      if (ip.is_ip4()) {
 | 
				
			||||||
 | 
					        std::string webserver_url = "http://" + ip.str() + ":" + to_string(USE_WEBSERVER_PORT);
 | 
				
			||||||
 | 
					        urls.push_back(webserver_url);
 | 
				
			||||||
 | 
					        break;
 | 
				
			||||||
 | 
					      }
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					#endif
 | 
				
			||||||
 | 
					    std::vector<uint8_t> data = improv::build_rpc_response(improv::WIFI_SETTINGS, urls);
 | 
				
			||||||
 | 
					    this->send_response_(data);
 | 
				
			||||||
 | 
					  } else if (this->is_active() && this->state_ != improv::STATE_PROVISIONED) {
 | 
				
			||||||
 | 
					    ESP_LOGD(TAG, "WiFi provisioned externally");
 | 
				
			||||||
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					  this->set_state_(improv::STATE_PROVISIONED);
 | 
				
			||||||
 | 
					  this->stop();
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
void ESP32ImprovComponent::advertise_service_data_() {
 | 
					void ESP32ImprovComponent::advertise_service_data_() {
 | 
				
			||||||
  uint8_t service_data[IMPROV_SERVICE_DATA_SIZE] = {};
 | 
					  uint8_t service_data[IMPROV_SERVICE_DATA_SIZE] = {};
 | 
				
			||||||
  service_data[0] = IMPROV_PROTOCOL_ID_1;  // PR
 | 
					  service_data[0] = IMPROV_PROTOCOL_ID_1;  // PR
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -111,6 +111,7 @@ class ESP32ImprovComponent : public Component {
 | 
				
			|||||||
  void send_response_(std::vector<uint8_t> &response);
 | 
					  void send_response_(std::vector<uint8_t> &response);
 | 
				
			||||||
  void process_incoming_data_();
 | 
					  void process_incoming_data_();
 | 
				
			||||||
  void on_wifi_connect_timeout_();
 | 
					  void on_wifi_connect_timeout_();
 | 
				
			||||||
 | 
					  void check_wifi_connection_();
 | 
				
			||||||
  bool check_identify_();
 | 
					  bool check_identify_();
 | 
				
			||||||
  void advertise_service_data_();
 | 
					  void advertise_service_data_();
 | 
				
			||||||
#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_DEBUG
 | 
					#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_DEBUG
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -19,6 +19,7 @@ from esphome.const import (
 | 
				
			|||||||
from esphome.core import CORE, coroutine_with_priority
 | 
					from esphome.core import CORE, coroutine_with_priority
 | 
				
			||||||
from esphome.coroutine import CoroPriority
 | 
					from esphome.coroutine import CoroPriority
 | 
				
			||||||
import esphome.final_validate as fv
 | 
					import esphome.final_validate as fv
 | 
				
			||||||
 | 
					from esphome.types import ConfigType
 | 
				
			||||||
 | 
					
 | 
				
			||||||
_LOGGER = logging.getLogger(__name__)
 | 
					_LOGGER = logging.getLogger(__name__)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -136,11 +137,12 @@ FINAL_VALIDATE_SCHEMA = ota_esphome_final_validate
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@coroutine_with_priority(CoroPriority.OTA_UPDATES)
 | 
					@coroutine_with_priority(CoroPriority.OTA_UPDATES)
 | 
				
			||||||
async def to_code(config):
 | 
					async def to_code(config: ConfigType) -> None:
 | 
				
			||||||
    var = cg.new_Pvariable(config[CONF_ID])
 | 
					    var = cg.new_Pvariable(config[CONF_ID])
 | 
				
			||||||
    cg.add(var.set_port(config[CONF_PORT]))
 | 
					    cg.add(var.set_port(config[CONF_PORT]))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    if CONF_PASSWORD in config:
 | 
					    # Password could be set to an empty string and we can assume that means no password
 | 
				
			||||||
 | 
					    if config.get(CONF_PASSWORD):
 | 
				
			||||||
        cg.add(var.set_auth_password(config[CONF_PASSWORD]))
 | 
					        cg.add(var.set_auth_password(config[CONF_PASSWORD]))
 | 
				
			||||||
        cg.add_define("USE_OTA_PASSWORD")
 | 
					        cg.add_define("USE_OTA_PASSWORD")
 | 
				
			||||||
        # Only include hash algorithms when password is configured
 | 
					        # Only include hash algorithms when password is configured
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -29,7 +29,7 @@ namespace esphome {
 | 
				
			|||||||
static const char *const TAG = "esphome.ota";
 | 
					static const char *const TAG = "esphome.ota";
 | 
				
			||||||
static constexpr uint16_t OTA_BLOCK_SIZE = 8192;
 | 
					static constexpr uint16_t OTA_BLOCK_SIZE = 8192;
 | 
				
			||||||
static constexpr size_t OTA_BUFFER_SIZE = 1024;                  // buffer size for OTA data transfer
 | 
					static constexpr size_t OTA_BUFFER_SIZE = 1024;                  // buffer size for OTA data transfer
 | 
				
			||||||
static constexpr uint32_t OTA_SOCKET_TIMEOUT_HANDSHAKE = 10000;  // milliseconds for initial handshake
 | 
					static constexpr uint32_t OTA_SOCKET_TIMEOUT_HANDSHAKE = 20000;  // milliseconds for initial handshake
 | 
				
			||||||
static constexpr uint32_t OTA_SOCKET_TIMEOUT_DATA = 90000;       // milliseconds for data transfer
 | 
					static constexpr uint32_t OTA_SOCKET_TIMEOUT_DATA = 90000;       // milliseconds for data transfer
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef USE_OTA_PASSWORD
 | 
					#ifdef USE_OTA_PASSWORD
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -16,7 +16,8 @@ void HDC1080Component::setup() {
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
  // if configuration fails - there is a problem
 | 
					  // if configuration fails - there is a problem
 | 
				
			||||||
  if (this->write_register(HDC1080_CMD_CONFIGURATION, config, 2) != i2c::ERROR_OK) {
 | 
					  if (this->write_register(HDC1080_CMD_CONFIGURATION, config, 2) != i2c::ERROR_OK) {
 | 
				
			||||||
    this->mark_failed();
 | 
					    ESP_LOGW(TAG, "Failed to configure HDC1080");
 | 
				
			||||||
 | 
					    this->status_set_warning();
 | 
				
			||||||
    return;
 | 
					    return;
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -9,8 +9,8 @@ static const char *const TAG = "htu21d";
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
static const uint8_t HTU21D_ADDRESS = 0x40;
 | 
					static const uint8_t HTU21D_ADDRESS = 0x40;
 | 
				
			||||||
static const uint8_t HTU21D_REGISTER_RESET = 0xFE;
 | 
					static const uint8_t HTU21D_REGISTER_RESET = 0xFE;
 | 
				
			||||||
static const uint8_t HTU21D_REGISTER_TEMPERATURE = 0xE3;
 | 
					static const uint8_t HTU21D_REGISTER_TEMPERATURE = 0xF3;
 | 
				
			||||||
static const uint8_t HTU21D_REGISTER_HUMIDITY = 0xE5;
 | 
					static const uint8_t HTU21D_REGISTER_HUMIDITY = 0xF5;
 | 
				
			||||||
static const uint8_t HTU21D_WRITERHT_REG_CMD = 0xE6; /**< Write RH/T User Register 1 */
 | 
					static const uint8_t HTU21D_WRITERHT_REG_CMD = 0xE6; /**< Write RH/T User Register 1 */
 | 
				
			||||||
static const uint8_t HTU21D_REGISTER_STATUS = 0xE7;
 | 
					static const uint8_t HTU21D_REGISTER_STATUS = 0xE7;
 | 
				
			||||||
static const uint8_t HTU21D_WRITEHEATER_REG_CMD = 0x51; /**< Write Heater Control Register */
 | 
					static const uint8_t HTU21D_WRITEHEATER_REG_CMD = 0x51; /**< Write Heater Control Register */
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -8,6 +8,13 @@ namespace json {
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
static const char *const TAG = "json";
 | 
					static const char *const TAG = "json";
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					#ifdef USE_PSRAM
 | 
				
			||||||
 | 
					// Global allocator that outlives all JsonDocuments returned by parse_json()
 | 
				
			||||||
 | 
					// This prevents dangling pointer issues when JsonDocuments are returned from functions
 | 
				
			||||||
 | 
					// NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables) - Must be mutable for ArduinoJson::Allocator
 | 
				
			||||||
 | 
					static SpiRamAllocator global_json_allocator;
 | 
				
			||||||
 | 
					#endif
 | 
				
			||||||
 | 
					
 | 
				
			||||||
std::string build_json(const json_build_t &f) {
 | 
					std::string build_json(const json_build_t &f) {
 | 
				
			||||||
  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
 | 
					  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
 | 
				
			||||||
  JsonBuilder builder;
 | 
					  JsonBuilder builder;
 | 
				
			||||||
@@ -33,8 +40,7 @@ JsonDocument parse_json(const uint8_t *data, size_t len) {
 | 
				
			|||||||
    return JsonObject();  // return unbound object
 | 
					    return JsonObject();  // return unbound object
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
#ifdef USE_PSRAM
 | 
					#ifdef USE_PSRAM
 | 
				
			||||||
  auto doc_allocator = SpiRamAllocator();
 | 
					  JsonDocument json_document(&global_json_allocator);
 | 
				
			||||||
  JsonDocument json_document(&doc_allocator);
 | 
					 | 
				
			||||||
#else
 | 
					#else
 | 
				
			||||||
  JsonDocument json_document;
 | 
					  JsonDocument json_document;
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -21,11 +21,11 @@ template<uint8_t N> class MCP23XXXBase : public Component, public gpio_expander:
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
 protected:
 | 
					 protected:
 | 
				
			||||||
  // read a given register
 | 
					  // read a given register
 | 
				
			||||||
  virtual bool read_reg(uint8_t reg, uint8_t *value);
 | 
					  virtual bool read_reg(uint8_t reg, uint8_t *value) = 0;
 | 
				
			||||||
  // write a value to a given register
 | 
					  // write a value to a given register
 | 
				
			||||||
  virtual bool write_reg(uint8_t reg, uint8_t value);
 | 
					  virtual bool write_reg(uint8_t reg, uint8_t value) = 0;
 | 
				
			||||||
  // update registers with given pin value.
 | 
					  // update registers with given pin value.
 | 
				
			||||||
  virtual void update_reg(uint8_t pin, bool pin_value, uint8_t reg_a);
 | 
					  virtual void update_reg(uint8_t pin, bool pin_value, uint8_t reg_a) = 0;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  bool open_drain_ints_;
 | 
					  bool open_drain_ints_;
 | 
				
			||||||
};
 | 
					};
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -11,7 +11,7 @@ from esphome.const import (
 | 
				
			|||||||
    CONF_SERVICES,
 | 
					    CONF_SERVICES,
 | 
				
			||||||
    PlatformFramework,
 | 
					    PlatformFramework,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
from esphome.core import CORE, coroutine_with_priority
 | 
					from esphome.core import CORE, Lambda, coroutine_with_priority
 | 
				
			||||||
from esphome.coroutine import CoroPriority
 | 
					from esphome.coroutine import CoroPriority
 | 
				
			||||||
 | 
					
 | 
				
			||||||
CODEOWNERS = ["@esphome/core"]
 | 
					CODEOWNERS = ["@esphome/core"]
 | 
				
			||||||
@@ -58,17 +58,64 @@ CONFIG_SCHEMA = cv.All(
 | 
				
			|||||||
)
 | 
					)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def mdns_txt_record(key: str, value: str):
 | 
					def mdns_txt_record(key: str, value: str) -> cg.RawExpression:
 | 
				
			||||||
    return cg.StructInitializer(
 | 
					    """Create a mDNS TXT record.
 | 
				
			||||||
        MDNSTXTRecord,
 | 
					
 | 
				
			||||||
        ("key", cg.RawExpression(f"MDNS_STR({cg.safe_exp(key)})")),
 | 
					    Public API for external components. Do not remove.
 | 
				
			||||||
        ("value", value),
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        key: The TXT record key
 | 
				
			||||||
 | 
					        value: The TXT record value (static string only)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        A RawExpression representing a MDNSTXTRecord struct
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    return cg.RawExpression(
 | 
				
			||||||
 | 
					        f"{{MDNS_STR({cg.safe_exp(key)}), MDNS_STR({cg.safe_exp(value)})}}"
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					async def _mdns_txt_record_templated(
 | 
				
			||||||
 | 
					    mdns_comp: cg.Pvariable, key: str, value: Lambda | str
 | 
				
			||||||
 | 
					) -> cg.RawExpression:
 | 
				
			||||||
 | 
					    """Create a mDNS TXT record with support for templated values.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Internal helper function.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        mdns_comp: The MDNSComponent instance (from cg.get_variable())
 | 
				
			||||||
 | 
					        key: The TXT record key
 | 
				
			||||||
 | 
					        value: The TXT record value (can be a static string or a lambda template)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        A RawExpression representing a MDNSTXTRecord struct
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    if not cg.is_template(value):
 | 
				
			||||||
 | 
					        # It's a static string - use directly in flash, no need to store in vector
 | 
				
			||||||
 | 
					        return mdns_txt_record(key, value)
 | 
				
			||||||
 | 
					    # It's a lambda - evaluate and store using helper
 | 
				
			||||||
 | 
					    templated_value = await cg.templatable(value, [], cg.std_string)
 | 
				
			||||||
 | 
					    safe_key = cg.safe_exp(key)
 | 
				
			||||||
 | 
					    dynamic_call = f"{mdns_comp}->add_dynamic_txt_value(({templated_value})())"
 | 
				
			||||||
 | 
					    return cg.RawExpression(f"{{MDNS_STR({safe_key}), MDNS_STR({dynamic_call})}}")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def mdns_service(
 | 
					def mdns_service(
 | 
				
			||||||
    service: str, proto: str, port: int, txt_records: list[dict[str, str]]
 | 
					    service: str, proto: str, port: int, txt_records: list[cg.RawExpression]
 | 
				
			||||||
):
 | 
					) -> cg.StructInitializer:
 | 
				
			||||||
 | 
					    """Create a mDNS service.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Public API for external components. Do not remove.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        service: Service name (e.g., "_http")
 | 
				
			||||||
 | 
					        proto: Protocol (e.g., "_tcp" or "_udp")
 | 
				
			||||||
 | 
					        port: Port number
 | 
				
			||||||
 | 
					        txt_records: List of MDNSTXTRecord expressions
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        A StructInitializer representing a MDNSService struct
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
    return cg.StructInitializer(
 | 
					    return cg.StructInitializer(
 | 
				
			||||||
        MDNSService,
 | 
					        MDNSService,
 | 
				
			||||||
        ("service_type", cg.RawExpression(f"MDNS_STR({cg.safe_exp(service)})")),
 | 
					        ("service_type", cg.RawExpression(f"MDNS_STR({cg.safe_exp(service)})")),
 | 
				
			||||||
@@ -107,23 +154,37 @@ async def to_code(config):
 | 
				
			|||||||
    # Ensure at least 1 service (fallback service)
 | 
					    # Ensure at least 1 service (fallback service)
 | 
				
			||||||
    cg.add_define("MDNS_SERVICE_COUNT", max(1, service_count))
 | 
					    cg.add_define("MDNS_SERVICE_COUNT", max(1, service_count))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Calculate compile-time dynamic TXT value count
 | 
				
			||||||
 | 
					    # Dynamic values are those that cannot be stored in flash at compile time
 | 
				
			||||||
 | 
					    dynamic_txt_count = 0
 | 
				
			||||||
 | 
					    if "api" in CORE.config:
 | 
				
			||||||
 | 
					        # Always: get_mac_address()
 | 
				
			||||||
 | 
					        dynamic_txt_count += 1
 | 
				
			||||||
 | 
					    # User-provided templatable TXT values (only lambdas, not static strings)
 | 
				
			||||||
 | 
					    dynamic_txt_count += sum(
 | 
				
			||||||
 | 
					        1
 | 
				
			||||||
 | 
					        for service in config[CONF_SERVICES]
 | 
				
			||||||
 | 
					        for txt_value in service[CONF_TXT].values()
 | 
				
			||||||
 | 
					        if cg.is_template(txt_value)
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Ensure at least 1 to avoid zero-size array
 | 
				
			||||||
 | 
					    cg.add_define("MDNS_DYNAMIC_TXT_COUNT", max(1, dynamic_txt_count))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    var = cg.new_Pvariable(config[CONF_ID])
 | 
					    var = cg.new_Pvariable(config[CONF_ID])
 | 
				
			||||||
    await cg.register_component(var, config)
 | 
					    await cg.register_component(var, config)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    for service in config[CONF_SERVICES]:
 | 
					    for service in config[CONF_SERVICES]:
 | 
				
			||||||
        txt = [
 | 
					        txt_records = [
 | 
				
			||||||
            cg.StructInitializer(
 | 
					            await _mdns_txt_record_templated(var, txt_key, txt_value)
 | 
				
			||||||
                MDNSTXTRecord,
 | 
					 | 
				
			||||||
                ("key", cg.RawExpression(f"MDNS_STR({cg.safe_exp(txt_key)})")),
 | 
					 | 
				
			||||||
                ("value", await cg.templatable(txt_value, [], cg.std_string)),
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
            for txt_key, txt_value in service[CONF_TXT].items()
 | 
					            for txt_key, txt_value in service[CONF_TXT].items()
 | 
				
			||||||
        ]
 | 
					        ]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        exp = mdns_service(
 | 
					        exp = mdns_service(
 | 
				
			||||||
            service[CONF_SERVICE],
 | 
					            service[CONF_SERVICE],
 | 
				
			||||||
            service[CONF_PROTOCOL],
 | 
					            service[CONF_PROTOCOL],
 | 
				
			||||||
            await cg.templatable(service[CONF_PORT], [], cg.uint16),
 | 
					            await cg.templatable(service[CONF_PORT], [], cg.uint16),
 | 
				
			||||||
            txt,
 | 
					            txt_records,
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        cg.add(var.add_extra_service(exp))
 | 
					        cg.add(var.add_extra_service(exp))
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -9,21 +9,9 @@
 | 
				
			|||||||
#include <pgmspace.h>
 | 
					#include <pgmspace.h>
 | 
				
			||||||
// Macro to define strings in PROGMEM on ESP8266, regular memory on other platforms
 | 
					// Macro to define strings in PROGMEM on ESP8266, regular memory on other platforms
 | 
				
			||||||
#define MDNS_STATIC_CONST_CHAR(name, value) static const char name[] PROGMEM = value
 | 
					#define MDNS_STATIC_CONST_CHAR(name, value) static const char name[] PROGMEM = value
 | 
				
			||||||
// Helper to convert PROGMEM string to std::string for TemplatableValue
 | 
					 | 
				
			||||||
// Only define this function if we have services that will use it
 | 
					 | 
				
			||||||
#if defined(USE_API) || defined(USE_PROMETHEUS) || defined(USE_WEBSERVER) || defined(USE_MDNS_EXTRA_SERVICES)
 | 
					 | 
				
			||||||
static std::string mdns_str_value(PGM_P str) {
 | 
					 | 
				
			||||||
  char buf[64];
 | 
					 | 
				
			||||||
  strncpy_P(buf, str, sizeof(buf) - 1);
 | 
					 | 
				
			||||||
  buf[sizeof(buf) - 1] = '\0';
 | 
					 | 
				
			||||||
  return std::string(buf);
 | 
					 | 
				
			||||||
}
 | 
					 | 
				
			||||||
#define MDNS_STR_VALUE(name) mdns_str_value(name)
 | 
					 | 
				
			||||||
#endif
 | 
					 | 
				
			||||||
#else
 | 
					#else
 | 
				
			||||||
// On non-ESP8266 platforms, use regular const char*
 | 
					// On non-ESP8266 platforms, use regular const char*
 | 
				
			||||||
#define MDNS_STATIC_CONST_CHAR(name, value) static constexpr const char name[] = value
 | 
					#define MDNS_STATIC_CONST_CHAR(name, value) static constexpr const char name[] = value
 | 
				
			||||||
#define MDNS_STR_VALUE(name) std::string(name)
 | 
					 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef USE_API
 | 
					#ifdef USE_API
 | 
				
			||||||
@@ -43,30 +31,10 @@ static const char *const TAG = "mdns";
 | 
				
			|||||||
#endif
 | 
					#endif
 | 
				
			||||||
 | 
					
 | 
				
			||||||
// Define all constant strings using the macro
 | 
					// Define all constant strings using the macro
 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(SERVICE_ESPHOMELIB, "_esphomelib");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(SERVICE_TCP, "_tcp");
 | 
					MDNS_STATIC_CONST_CHAR(SERVICE_TCP, "_tcp");
 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(SERVICE_PROMETHEUS, "_prometheus-http");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(SERVICE_HTTP, "_http");
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(TXT_FRIENDLY_NAME, "friendly_name");
 | 
					// Wrap build-time defines into flash storage
 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(TXT_VERSION, "version");
 | 
					MDNS_STATIC_CONST_CHAR(VALUE_VERSION, ESPHOME_VERSION);
 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(TXT_MAC, "mac");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(TXT_PLATFORM, "platform");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(TXT_BOARD, "board");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(TXT_NETWORK, "network");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION, "api_encryption");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION_SUPPORTED, "api_encryption_supported");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(TXT_PROJECT_NAME, "project_name");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(TXT_PROJECT_VERSION, "project_version");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(TXT_PACKAGE_IMPORT_URL, "package_import_url");
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(PLATFORM_ESP8266, "ESP8266");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(PLATFORM_ESP32, "ESP32");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(PLATFORM_RP2040, "RP2040");
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(NETWORK_WIFI, "wifi");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(NETWORK_ETHERNET, "ethernet");
 | 
					 | 
				
			||||||
MDNS_STATIC_CONST_CHAR(NETWORK_THREAD, "thread");
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
void MDNSComponent::compile_records_() {
 | 
					void MDNSComponent::compile_records_() {
 | 
				
			||||||
  this->hostname_ = App.get_name();
 | 
					  this->hostname_ = App.get_name();
 | 
				
			||||||
@@ -75,6 +43,15 @@ void MDNSComponent::compile_records_() {
 | 
				
			|||||||
  // in mdns/__init__.py. If you add a new service here, update both locations.
 | 
					  // in mdns/__init__.py. If you add a new service here, update both locations.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef USE_API
 | 
					#ifdef USE_API
 | 
				
			||||||
 | 
					  MDNS_STATIC_CONST_CHAR(SERVICE_ESPHOMELIB, "_esphomelib");
 | 
				
			||||||
 | 
					  MDNS_STATIC_CONST_CHAR(TXT_FRIENDLY_NAME, "friendly_name");
 | 
				
			||||||
 | 
					  MDNS_STATIC_CONST_CHAR(TXT_VERSION, "version");
 | 
				
			||||||
 | 
					  MDNS_STATIC_CONST_CHAR(TXT_MAC, "mac");
 | 
				
			||||||
 | 
					  MDNS_STATIC_CONST_CHAR(TXT_PLATFORM, "platform");
 | 
				
			||||||
 | 
					  MDNS_STATIC_CONST_CHAR(TXT_BOARD, "board");
 | 
				
			||||||
 | 
					  MDNS_STATIC_CONST_CHAR(TXT_NETWORK, "network");
 | 
				
			||||||
 | 
					  MDNS_STATIC_CONST_CHAR(VALUE_BOARD, ESPHOME_BOARD);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  if (api::global_api_server != nullptr) {
 | 
					  if (api::global_api_server != nullptr) {
 | 
				
			||||||
    auto &service = this->services_.emplace_next();
 | 
					    auto &service = this->services_.emplace_next();
 | 
				
			||||||
    service.service_type = MDNS_STR(SERVICE_ESPHOMELIB);
 | 
					    service.service_type = MDNS_STR(SERVICE_ESPHOMELIB);
 | 
				
			||||||
@@ -109,52 +86,66 @@ void MDNSComponent::compile_records_() {
 | 
				
			|||||||
    txt_records.reserve(txt_count);
 | 
					    txt_records.reserve(txt_count);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    if (!friendly_name_empty) {
 | 
					    if (!friendly_name_empty) {
 | 
				
			||||||
      txt_records.push_back({MDNS_STR(TXT_FRIENDLY_NAME), friendly_name});
 | 
					      txt_records.push_back({MDNS_STR(TXT_FRIENDLY_NAME), MDNS_STR(friendly_name.c_str())});
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
    txt_records.push_back({MDNS_STR(TXT_VERSION), ESPHOME_VERSION});
 | 
					    txt_records.push_back({MDNS_STR(TXT_VERSION), MDNS_STR(VALUE_VERSION)});
 | 
				
			||||||
    txt_records.push_back({MDNS_STR(TXT_MAC), get_mac_address()});
 | 
					    txt_records.push_back({MDNS_STR(TXT_MAC), MDNS_STR(this->add_dynamic_txt_value(get_mac_address()))});
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef USE_ESP8266
 | 
					#ifdef USE_ESP8266
 | 
				
			||||||
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR_VALUE(PLATFORM_ESP8266)});
 | 
					    MDNS_STATIC_CONST_CHAR(PLATFORM_ESP8266, "ESP8266");
 | 
				
			||||||
 | 
					    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR(PLATFORM_ESP8266)});
 | 
				
			||||||
#elif defined(USE_ESP32)
 | 
					#elif defined(USE_ESP32)
 | 
				
			||||||
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR_VALUE(PLATFORM_ESP32)});
 | 
					    MDNS_STATIC_CONST_CHAR(PLATFORM_ESP32, "ESP32");
 | 
				
			||||||
 | 
					    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR(PLATFORM_ESP32)});
 | 
				
			||||||
#elif defined(USE_RP2040)
 | 
					#elif defined(USE_RP2040)
 | 
				
			||||||
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR_VALUE(PLATFORM_RP2040)});
 | 
					    MDNS_STATIC_CONST_CHAR(PLATFORM_RP2040, "RP2040");
 | 
				
			||||||
 | 
					    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR(PLATFORM_RP2040)});
 | 
				
			||||||
#elif defined(USE_LIBRETINY)
 | 
					#elif defined(USE_LIBRETINY)
 | 
				
			||||||
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), lt_cpu_get_model_name()});
 | 
					    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR(lt_cpu_get_model_name())});
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    txt_records.push_back({MDNS_STR(TXT_BOARD), ESPHOME_BOARD});
 | 
					    txt_records.push_back({MDNS_STR(TXT_BOARD), MDNS_STR(VALUE_BOARD)});
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#if defined(USE_WIFI)
 | 
					#if defined(USE_WIFI)
 | 
				
			||||||
    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR_VALUE(NETWORK_WIFI)});
 | 
					    MDNS_STATIC_CONST_CHAR(NETWORK_WIFI, "wifi");
 | 
				
			||||||
 | 
					    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR(NETWORK_WIFI)});
 | 
				
			||||||
#elif defined(USE_ETHERNET)
 | 
					#elif defined(USE_ETHERNET)
 | 
				
			||||||
    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR_VALUE(NETWORK_ETHERNET)});
 | 
					    MDNS_STATIC_CONST_CHAR(NETWORK_ETHERNET, "ethernet");
 | 
				
			||||||
 | 
					    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR(NETWORK_ETHERNET)});
 | 
				
			||||||
#elif defined(USE_OPENTHREAD)
 | 
					#elif defined(USE_OPENTHREAD)
 | 
				
			||||||
    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR_VALUE(NETWORK_THREAD)});
 | 
					    MDNS_STATIC_CONST_CHAR(NETWORK_THREAD, "thread");
 | 
				
			||||||
 | 
					    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR(NETWORK_THREAD)});
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef USE_API_NOISE
 | 
					#ifdef USE_API_NOISE
 | 
				
			||||||
 | 
					    MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION, "api_encryption");
 | 
				
			||||||
 | 
					    MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION_SUPPORTED, "api_encryption_supported");
 | 
				
			||||||
    MDNS_STATIC_CONST_CHAR(NOISE_ENCRYPTION, "Noise_NNpsk0_25519_ChaChaPoly_SHA256");
 | 
					    MDNS_STATIC_CONST_CHAR(NOISE_ENCRYPTION, "Noise_NNpsk0_25519_ChaChaPoly_SHA256");
 | 
				
			||||||
    if (api::global_api_server->get_noise_ctx()->has_psk()) {
 | 
					    bool has_psk = api::global_api_server->get_noise_ctx()->has_psk();
 | 
				
			||||||
      txt_records.push_back({MDNS_STR(TXT_API_ENCRYPTION), MDNS_STR_VALUE(NOISE_ENCRYPTION)});
 | 
					    const char *encryption_key = has_psk ? TXT_API_ENCRYPTION : TXT_API_ENCRYPTION_SUPPORTED;
 | 
				
			||||||
    } else {
 | 
					    txt_records.push_back({MDNS_STR(encryption_key), MDNS_STR(NOISE_ENCRYPTION)});
 | 
				
			||||||
      txt_records.push_back({MDNS_STR(TXT_API_ENCRYPTION_SUPPORTED), MDNS_STR_VALUE(NOISE_ENCRYPTION)});
 | 
					 | 
				
			||||||
    }
 | 
					 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef ESPHOME_PROJECT_NAME
 | 
					#ifdef ESPHOME_PROJECT_NAME
 | 
				
			||||||
    txt_records.push_back({MDNS_STR(TXT_PROJECT_NAME), ESPHOME_PROJECT_NAME});
 | 
					    MDNS_STATIC_CONST_CHAR(TXT_PROJECT_NAME, "project_name");
 | 
				
			||||||
    txt_records.push_back({MDNS_STR(TXT_PROJECT_VERSION), ESPHOME_PROJECT_VERSION});
 | 
					    MDNS_STATIC_CONST_CHAR(TXT_PROJECT_VERSION, "project_version");
 | 
				
			||||||
 | 
					    MDNS_STATIC_CONST_CHAR(VALUE_PROJECT_NAME, ESPHOME_PROJECT_NAME);
 | 
				
			||||||
 | 
					    MDNS_STATIC_CONST_CHAR(VALUE_PROJECT_VERSION, ESPHOME_PROJECT_VERSION);
 | 
				
			||||||
 | 
					    txt_records.push_back({MDNS_STR(TXT_PROJECT_NAME), MDNS_STR(VALUE_PROJECT_NAME)});
 | 
				
			||||||
 | 
					    txt_records.push_back({MDNS_STR(TXT_PROJECT_VERSION), MDNS_STR(VALUE_PROJECT_VERSION)});
 | 
				
			||||||
#endif  // ESPHOME_PROJECT_NAME
 | 
					#endif  // ESPHOME_PROJECT_NAME
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef USE_DASHBOARD_IMPORT
 | 
					#ifdef USE_DASHBOARD_IMPORT
 | 
				
			||||||
    txt_records.push_back({MDNS_STR(TXT_PACKAGE_IMPORT_URL), dashboard_import::get_package_import_url()});
 | 
					    MDNS_STATIC_CONST_CHAR(TXT_PACKAGE_IMPORT_URL, "package_import_url");
 | 
				
			||||||
 | 
					    txt_records.push_back(
 | 
				
			||||||
 | 
					        {MDNS_STR(TXT_PACKAGE_IMPORT_URL), MDNS_STR(dashboard_import::get_package_import_url().c_str())});
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
#endif  // USE_API
 | 
					#endif  // USE_API
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef USE_PROMETHEUS
 | 
					#ifdef USE_PROMETHEUS
 | 
				
			||||||
 | 
					  MDNS_STATIC_CONST_CHAR(SERVICE_PROMETHEUS, "_prometheus-http");
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  auto &prom_service = this->services_.emplace_next();
 | 
					  auto &prom_service = this->services_.emplace_next();
 | 
				
			||||||
  prom_service.service_type = MDNS_STR(SERVICE_PROMETHEUS);
 | 
					  prom_service.service_type = MDNS_STR(SERVICE_PROMETHEUS);
 | 
				
			||||||
  prom_service.proto = MDNS_STR(SERVICE_TCP);
 | 
					  prom_service.proto = MDNS_STR(SERVICE_TCP);
 | 
				
			||||||
@@ -162,6 +153,8 @@ void MDNSComponent::compile_records_() {
 | 
				
			|||||||
#endif
 | 
					#endif
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef USE_WEBSERVER
 | 
					#ifdef USE_WEBSERVER
 | 
				
			||||||
 | 
					  MDNS_STATIC_CONST_CHAR(SERVICE_HTTP, "_http");
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  auto &web_service = this->services_.emplace_next();
 | 
					  auto &web_service = this->services_.emplace_next();
 | 
				
			||||||
  web_service.service_type = MDNS_STR(SERVICE_HTTP);
 | 
					  web_service.service_type = MDNS_STR(SERVICE_HTTP);
 | 
				
			||||||
  web_service.proto = MDNS_STR(SERVICE_TCP);
 | 
					  web_service.proto = MDNS_STR(SERVICE_TCP);
 | 
				
			||||||
@@ -169,13 +162,16 @@ void MDNSComponent::compile_records_() {
 | 
				
			|||||||
#endif
 | 
					#endif
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#if !defined(USE_API) && !defined(USE_PROMETHEUS) && !defined(USE_WEBSERVER) && !defined(USE_MDNS_EXTRA_SERVICES)
 | 
					#if !defined(USE_API) && !defined(USE_PROMETHEUS) && !defined(USE_WEBSERVER) && !defined(USE_MDNS_EXTRA_SERVICES)
 | 
				
			||||||
 | 
					  MDNS_STATIC_CONST_CHAR(SERVICE_HTTP, "_http");
 | 
				
			||||||
 | 
					  MDNS_STATIC_CONST_CHAR(TXT_VERSION, "version");
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  // Publish "http" service if not using native API or any other services
 | 
					  // Publish "http" service if not using native API or any other services
 | 
				
			||||||
  // This is just to have *some* mDNS service so that .local resolution works
 | 
					  // This is just to have *some* mDNS service so that .local resolution works
 | 
				
			||||||
  auto &fallback_service = this->services_.emplace_next();
 | 
					  auto &fallback_service = this->services_.emplace_next();
 | 
				
			||||||
  fallback_service.service_type = MDNS_STR(SERVICE_HTTP);
 | 
					  fallback_service.service_type = MDNS_STR(SERVICE_HTTP);
 | 
				
			||||||
  fallback_service.proto = MDNS_STR(SERVICE_TCP);
 | 
					  fallback_service.proto = MDNS_STR(SERVICE_TCP);
 | 
				
			||||||
  fallback_service.port = USE_WEBSERVER_PORT;
 | 
					  fallback_service.port = USE_WEBSERVER_PORT;
 | 
				
			||||||
  fallback_service.txt_records.push_back({MDNS_STR(TXT_VERSION), ESPHOME_VERSION});
 | 
					  fallback_service.txt_records.push_back({MDNS_STR(TXT_VERSION), MDNS_STR(VALUE_VERSION)});
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -190,8 +186,7 @@ void MDNSComponent::dump_config() {
 | 
				
			|||||||
    ESP_LOGV(TAG, "  - %s, %s, %d", MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto),
 | 
					    ESP_LOGV(TAG, "  - %s, %s, %d", MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto),
 | 
				
			||||||
             const_cast<TemplatableValue<uint16_t> &>(service.port).value());
 | 
					             const_cast<TemplatableValue<uint16_t> &>(service.port).value());
 | 
				
			||||||
    for (const auto &record : service.txt_records) {
 | 
					    for (const auto &record : service.txt_records) {
 | 
				
			||||||
      ESP_LOGV(TAG, "    TXT: %s = %s", MDNS_STR_ARG(record.key),
 | 
					      ESP_LOGV(TAG, "    TXT: %s = %s", MDNS_STR_ARG(record.key), MDNS_STR_ARG(record.value));
 | 
				
			||||||
               const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
 | 
					 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -27,7 +27,7 @@ struct MDNSString;
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
struct MDNSTXTRecord {
 | 
					struct MDNSTXTRecord {
 | 
				
			||||||
  const MDNSString *key;
 | 
					  const MDNSString *key;
 | 
				
			||||||
  TemplatableValue<std::string> value;
 | 
					  const MDNSString *value;
 | 
				
			||||||
};
 | 
					};
 | 
				
			||||||
 | 
					
 | 
				
			||||||
struct MDNSService {
 | 
					struct MDNSService {
 | 
				
			||||||
@@ -59,6 +59,17 @@ class MDNSComponent : public Component {
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
  void on_shutdown() override;
 | 
					  void on_shutdown() override;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					  /// Add a dynamic TXT value and return pointer to it for use in MDNSTXTRecord
 | 
				
			||||||
 | 
					  const char *add_dynamic_txt_value(const std::string &value) {
 | 
				
			||||||
 | 
					    this->dynamic_txt_values_.push_back(value);
 | 
				
			||||||
 | 
					    return this->dynamic_txt_values_[this->dynamic_txt_values_.size() - 1].c_str();
 | 
				
			||||||
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					  /// Storage for runtime-generated TXT values (MAC address, user lambdas)
 | 
				
			||||||
 | 
					  /// Pre-sized at compile time via MDNS_DYNAMIC_TXT_COUNT to avoid heap allocations.
 | 
				
			||||||
 | 
					  /// Static/compile-time values (version, board, etc.) are stored directly in flash and don't use this.
 | 
				
			||||||
 | 
					  StaticVector<std::string, MDNS_DYNAMIC_TXT_COUNT> dynamic_txt_values_;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 protected:
 | 
					 protected:
 | 
				
			||||||
  StaticVector<MDNSService, MDNS_SERVICE_COUNT> services_{};
 | 
					  StaticVector<MDNSService, MDNS_SERVICE_COUNT> services_{};
 | 
				
			||||||
  std::string hostname_;
 | 
					  std::string hostname_;
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -2,7 +2,6 @@
 | 
				
			|||||||
#if defined(USE_ESP32) && defined(USE_MDNS)
 | 
					#if defined(USE_ESP32) && defined(USE_MDNS)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#include <mdns.h>
 | 
					#include <mdns.h>
 | 
				
			||||||
#include <cstring>
 | 
					 | 
				
			||||||
#include "esphome/core/hal.h"
 | 
					#include "esphome/core/hal.h"
 | 
				
			||||||
#include "esphome/core/log.h"
 | 
					#include "esphome/core/log.h"
 | 
				
			||||||
#include "mdns_component.h"
 | 
					#include "mdns_component.h"
 | 
				
			||||||
@@ -29,21 +28,16 @@ void MDNSComponent::setup() {
 | 
				
			|||||||
    std::vector<mdns_txt_item_t> txt_records;
 | 
					    std::vector<mdns_txt_item_t> txt_records;
 | 
				
			||||||
    for (const auto &record : service.txt_records) {
 | 
					    for (const auto &record : service.txt_records) {
 | 
				
			||||||
      mdns_txt_item_t it{};
 | 
					      mdns_txt_item_t it{};
 | 
				
			||||||
      // key is a compile-time string literal in flash, no need to strdup
 | 
					      // key and value are either compile-time string literals in flash or pointers to dynamic_txt_values_
 | 
				
			||||||
 | 
					      // Both remain valid for the lifetime of this function, and ESP-IDF makes internal copies
 | 
				
			||||||
      it.key = MDNS_STR_ARG(record.key);
 | 
					      it.key = MDNS_STR_ARG(record.key);
 | 
				
			||||||
      // value is a temporary from TemplatableValue, must strdup to keep it alive
 | 
					      it.value = MDNS_STR_ARG(record.value);
 | 
				
			||||||
      it.value = strdup(const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
 | 
					 | 
				
			||||||
      txt_records.push_back(it);
 | 
					      txt_records.push_back(it);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
    uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
 | 
					    uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
 | 
				
			||||||
    err = mdns_service_add(nullptr, MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto), port,
 | 
					    err = mdns_service_add(nullptr, MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto), port,
 | 
				
			||||||
                           txt_records.data(), txt_records.size());
 | 
					                           txt_records.data(), txt_records.size());
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    // free records
 | 
					 | 
				
			||||||
    for (const auto &it : txt_records) {
 | 
					 | 
				
			||||||
      free((void *) it.value);  // NOLINT(cppcoreguidelines-no-malloc)
 | 
					 | 
				
			||||||
    }
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    if (err != ESP_OK) {
 | 
					    if (err != ESP_OK) {
 | 
				
			||||||
      ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err));
 | 
					      ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err));
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -33,7 +33,7 @@ void MDNSComponent::setup() {
 | 
				
			|||||||
    MDNS.addService(FPSTR(service_type), FPSTR(proto), port);
 | 
					    MDNS.addService(FPSTR(service_type), FPSTR(proto), port);
 | 
				
			||||||
    for (const auto &record : service.txt_records) {
 | 
					    for (const auto &record : service.txt_records) {
 | 
				
			||||||
      MDNS.addServiceTxt(FPSTR(service_type), FPSTR(proto), FPSTR(MDNS_STR_ARG(record.key)),
 | 
					      MDNS.addServiceTxt(FPSTR(service_type), FPSTR(proto), FPSTR(MDNS_STR_ARG(record.key)),
 | 
				
			||||||
                         const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
 | 
					                         FPSTR(MDNS_STR_ARG(record.value)));
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -32,8 +32,7 @@ void MDNSComponent::setup() {
 | 
				
			|||||||
    uint16_t port_ = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
 | 
					    uint16_t port_ = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
 | 
				
			||||||
    MDNS.addService(service_type, proto, port_);
 | 
					    MDNS.addService(service_type, proto, port_);
 | 
				
			||||||
    for (const auto &record : service.txt_records) {
 | 
					    for (const auto &record : service.txt_records) {
 | 
				
			||||||
      MDNS.addServiceTxt(service_type, proto, MDNS_STR_ARG(record.key),
 | 
					      MDNS.addServiceTxt(service_type, proto, MDNS_STR_ARG(record.key), MDNS_STR_ARG(record.value));
 | 
				
			||||||
                         const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
 | 
					 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -32,8 +32,7 @@ void MDNSComponent::setup() {
 | 
				
			|||||||
    uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
 | 
					    uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
 | 
				
			||||||
    MDNS.addService(service_type, proto, port);
 | 
					    MDNS.addService(service_type, proto, port);
 | 
				
			||||||
    for (const auto &record : service.txt_records) {
 | 
					    for (const auto &record : service.txt_records) {
 | 
				
			||||||
      MDNS.addServiceTxt(service_type, proto, MDNS_STR_ARG(record.key),
 | 
					      MDNS.addServiceTxt(service_type, proto, MDNS_STR_ARG(record.key), MDNS_STR_ARG(record.value));
 | 
				
			||||||
                         const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
 | 
					 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -11,6 +11,7 @@ from esphome.const import (
 | 
				
			|||||||
    CONF_BRIGHTNESS,
 | 
					    CONF_BRIGHTNESS,
 | 
				
			||||||
    CONF_COLOR_ORDER,
 | 
					    CONF_COLOR_ORDER,
 | 
				
			||||||
    CONF_DIMENSIONS,
 | 
					    CONF_DIMENSIONS,
 | 
				
			||||||
 | 
					    CONF_DISABLED,
 | 
				
			||||||
    CONF_HEIGHT,
 | 
					    CONF_HEIGHT,
 | 
				
			||||||
    CONF_INIT_SEQUENCE,
 | 
					    CONF_INIT_SEQUENCE,
 | 
				
			||||||
    CONF_INVERT_COLORS,
 | 
					    CONF_INVERT_COLORS,
 | 
				
			||||||
@@ -301,6 +302,8 @@ class DriverChip:
 | 
				
			|||||||
        Check if a rotation can be implemented in hardware using the MADCTL register.
 | 
					        Check if a rotation can be implemented in hardware using the MADCTL register.
 | 
				
			||||||
        A rotation of 180 is always possible if x and y mirroring are supported, 90 and 270 are possible if the model supports swapping X and Y.
 | 
					        A rotation of 180 is always possible if x and y mirroring are supported, 90 and 270 are possible if the model supports swapping X and Y.
 | 
				
			||||||
        """
 | 
					        """
 | 
				
			||||||
 | 
					        if config.get(CONF_TRANSFORM) == CONF_DISABLED:
 | 
				
			||||||
 | 
					            return False
 | 
				
			||||||
        transforms = self.transforms
 | 
					        transforms = self.transforms
 | 
				
			||||||
        rotation = config.get(CONF_ROTATION, 0)
 | 
					        rotation = config.get(CONF_ROTATION, 0)
 | 
				
			||||||
        if rotation == 0 or not transforms:
 | 
					        if rotation == 0 or not transforms:
 | 
				
			||||||
@@ -358,26 +361,26 @@ class DriverChip:
 | 
				
			|||||||
                CONF_SWAP_XY: self.get_default(CONF_SWAP_XY),
 | 
					                CONF_SWAP_XY: self.get_default(CONF_SWAP_XY),
 | 
				
			||||||
            },
 | 
					            },
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        # fill in defaults if not provided
 | 
					        if not isinstance(transform, dict):
 | 
				
			||||||
        mirror_x = transform.get(CONF_MIRROR_X, self.get_default(CONF_MIRROR_X))
 | 
					            # Presumably disabled
 | 
				
			||||||
        mirror_y = transform.get(CONF_MIRROR_Y, self.get_default(CONF_MIRROR_Y))
 | 
					            return {
 | 
				
			||||||
        swap_xy = transform.get(CONF_SWAP_XY, self.get_default(CONF_SWAP_XY))
 | 
					                CONF_MIRROR_X: False,
 | 
				
			||||||
        transform[CONF_MIRROR_X] = mirror_x
 | 
					                CONF_MIRROR_Y: False,
 | 
				
			||||||
        transform[CONF_MIRROR_Y] = mirror_y
 | 
					                CONF_SWAP_XY: False,
 | 
				
			||||||
        transform[CONF_SWAP_XY] = swap_xy
 | 
					                CONF_TRANSFORM: False,
 | 
				
			||||||
 | 
					            }
 | 
				
			||||||
        # Can we use the MADCTL register to set the rotation?
 | 
					        # Can we use the MADCTL register to set the rotation?
 | 
				
			||||||
        if can_transform and CONF_TRANSFORM not in config:
 | 
					        if can_transform and CONF_TRANSFORM not in config:
 | 
				
			||||||
            rotation = config[CONF_ROTATION]
 | 
					            rotation = config[CONF_ROTATION]
 | 
				
			||||||
            if rotation == 180:
 | 
					            if rotation == 180:
 | 
				
			||||||
                transform[CONF_MIRROR_X] = not mirror_x
 | 
					                transform[CONF_MIRROR_X] = not transform[CONF_MIRROR_X]
 | 
				
			||||||
                transform[CONF_MIRROR_Y] = not mirror_y
 | 
					                transform[CONF_MIRROR_Y] = not transform[CONF_MIRROR_Y]
 | 
				
			||||||
            elif rotation == 90:
 | 
					            elif rotation == 90:
 | 
				
			||||||
                transform[CONF_SWAP_XY] = not swap_xy
 | 
					                transform[CONF_SWAP_XY] = not transform[CONF_SWAP_XY]
 | 
				
			||||||
                transform[CONF_MIRROR_X] = not mirror_x
 | 
					                transform[CONF_MIRROR_X] = not transform[CONF_MIRROR_X]
 | 
				
			||||||
            else:
 | 
					            else:
 | 
				
			||||||
                transform[CONF_SWAP_XY] = not swap_xy
 | 
					                transform[CONF_SWAP_XY] = not transform[CONF_SWAP_XY]
 | 
				
			||||||
                transform[CONF_MIRROR_Y] = not mirror_y
 | 
					                transform[CONF_MIRROR_Y] = not transform[CONF_MIRROR_Y]
 | 
				
			||||||
            transform[CONF_TRANSFORM] = True
 | 
					            transform[CONF_TRANSFORM] = True
 | 
				
			||||||
        return transform
 | 
					        return transform
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -56,50 +56,41 @@ DriverChip(
 | 
				
			|||||||
    "WAVESHARE-P4-86-PANEL",
 | 
					    "WAVESHARE-P4-86-PANEL",
 | 
				
			||||||
    height=720,
 | 
					    height=720,
 | 
				
			||||||
    width=720,
 | 
					    width=720,
 | 
				
			||||||
    hsync_back_porch=80,
 | 
					    hsync_back_porch=50,
 | 
				
			||||||
    hsync_pulse_width=20,
 | 
					    hsync_pulse_width=20,
 | 
				
			||||||
    hsync_front_porch=80,
 | 
					    hsync_front_porch=50,
 | 
				
			||||||
    vsync_back_porch=12,
 | 
					    vsync_back_porch=20,
 | 
				
			||||||
    vsync_pulse_width=4,
 | 
					    vsync_pulse_width=4,
 | 
				
			||||||
    vsync_front_porch=30,
 | 
					    vsync_front_porch=20,
 | 
				
			||||||
    pclk_frequency="46MHz",
 | 
					    pclk_frequency="38MHz",
 | 
				
			||||||
    lane_bit_rate="1Gbps",
 | 
					    lane_bit_rate="480Mbps",
 | 
				
			||||||
    swap_xy=cv.UNDEFINED,
 | 
					    swap_xy=cv.UNDEFINED,
 | 
				
			||||||
    color_order="RGB",
 | 
					    color_order="RGB",
 | 
				
			||||||
    reset_pin=27,
 | 
					    reset_pin=27,
 | 
				
			||||||
    initsequence=[
 | 
					    initsequence=[
 | 
				
			||||||
        (0xB9, 0xF1, 0x12, 0x83),
 | 
					        (0xB9, 0xF1, 0x12, 0x83),
 | 
				
			||||||
        (
 | 
					        (0xB1, 0x00, 0x00, 0x00, 0xDA, 0x80),
 | 
				
			||||||
            0xBA, 0x31, 0x81, 0x05, 0xF9, 0x0E, 0x0E, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x44, 0x25, 0x00,
 | 
					        (0xB2, 0x3C, 0x12, 0x30),
 | 
				
			||||||
            0x90, 0x0A, 0x00, 0x00, 0x01, 0x4F, 0x01, 0x00, 0x00, 0x37,
 | 
					 | 
				
			||||||
        ),
 | 
					 | 
				
			||||||
        (0xB8, 0x25, 0x22, 0xF0, 0x63),
 | 
					 | 
				
			||||||
        (0xBF, 0x02, 0x11, 0x00),
 | 
					 | 
				
			||||||
        (0xB3, 0x10, 0x10, 0x28, 0x28, 0x03, 0xFF, 0x00, 0x00, 0x00, 0x00),
 | 
					        (0xB3, 0x10, 0x10, 0x28, 0x28, 0x03, 0xFF, 0x00, 0x00, 0x00, 0x00),
 | 
				
			||||||
        (0xC0, 0x73, 0x73, 0x50, 0x50, 0x00, 0x00, 0x12, 0x70, 0x00),
 | 
					        (0xB4, 0x80),
 | 
				
			||||||
        (0xBC, 0x46), (0xCC, 0x0B), (0xB4, 0x80), (0xB2, 0x3C, 0x12, 0x30),
 | 
					 | 
				
			||||||
        (0xE3, 0x07, 0x07, 0x0B, 0x0B, 0x03, 0x0B, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0xC0, 0x10,),
 | 
					 | 
				
			||||||
        (0xC1, 0x36, 0x00, 0x32, 0x32, 0x77, 0xF1, 0xCC, 0xCC, 0x77, 0x77, 0x33, 0x33),
 | 
					 | 
				
			||||||
        (0xB5, 0x0A, 0x0A),
 | 
					        (0xB5, 0x0A, 0x0A),
 | 
				
			||||||
        (0xB6, 0xB2, 0xB2),
 | 
					        (0xB6, 0x97, 0x97),
 | 
				
			||||||
        (
 | 
					        (0xB8, 0x26, 0x22, 0xF0, 0x13),
 | 
				
			||||||
            0xE9, 0xC8, 0x10, 0x0A, 0x10, 0x0F, 0xA1, 0x80, 0x12, 0x31, 0x23, 0x47, 0x86, 0xA1, 0x80,
 | 
					        (0xBA, 0x31, 0x81, 0x0F, 0xF9, 0x0E, 0x06, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x44, 0x25, 0x00, 0x90, 0x0A, 0x00, 0x00, 0x01, 0x4F, 0x01, 0x00, 0x00, 0x37),
 | 
				
			||||||
            0x47, 0x08, 0x00, 0x00, 0x0D, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0D, 0x00, 0x00, 0x00, 0x48,
 | 
					        (0xBC, 0x47),
 | 
				
			||||||
            0x02, 0x8B, 0xAF, 0x46, 0x02, 0x88, 0x88, 0x88, 0x88, 0x88, 0x48, 0x13, 0x8B, 0xAF, 0x57,
 | 
					        (0xBF, 0x02, 0x11, 0x00),
 | 
				
			||||||
            0x13, 0x88, 0x88, 0x88, 0x88, 0x88, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
 | 
					        (0xC0, 0x73, 0x73, 0x50, 0x50, 0x00, 0x00, 0x12, 0x70, 0x00),
 | 
				
			||||||
            0x00, 0x00, 0x00, 0x00,
 | 
					        (0xC1, 0x25, 0x00, 0x32, 0x32, 0x77, 0xE4, 0xFF, 0xFF, 0xCC, 0xCC, 0x77, 0x77),
 | 
				
			||||||
        ),
 | 
					        (0xC6, 0x82, 0x00, 0xBF, 0xFF, 0x00, 0xFF),
 | 
				
			||||||
        (
 | 
					        (0xC7, 0xB8, 0x00, 0x0A, 0x10, 0x01, 0x09),
 | 
				
			||||||
            0xEA, 0x96, 0x12, 0x01, 0x01, 0x01, 0x78, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4F, 0x31,
 | 
					        (0xC8, 0x10, 0x40, 0x1E, 0x02),
 | 
				
			||||||
            0x8B, 0xA8, 0x31, 0x75, 0x88, 0x88, 0x88, 0x88, 0x88, 0x4F, 0x20, 0x8B, 0xA8, 0x20, 0x64,
 | 
					        (0xCC, 0x0B),
 | 
				
			||||||
            0x88, 0x88, 0x88, 0x88, 0x88, 0x23, 0x00, 0x00, 0x01, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00,
 | 
					        (0xE0, 0x00, 0x0B, 0x10, 0x2C, 0x3D, 0x3F, 0x42, 0x3A, 0x07, 0x0D, 0x0F, 0x13, 0x15, 0x13, 0x14, 0x0F, 0x16, 0x00, 0x0B, 0x10, 0x2C, 0x3D, 0x3F, 0x42, 0x3A, 0x07, 0x0D, 0x0F, 0x13, 0x15, 0x13, 0x14, 0x0F, 0x16),
 | 
				
			||||||
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0xA1, 0x80, 0x00, 0x00,
 | 
					        (0xE3, 0x07, 0x07, 0x0B, 0x0B, 0x0B, 0x0B, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0xC0, 0x10),
 | 
				
			||||||
            0x00, 0x00,
 | 
					        (0xE9, 0xC8, 0x10, 0x0A, 0x00, 0x00, 0x80, 0x81, 0x12, 0x31, 0x23, 0x4F, 0x86, 0xA0, 0x00, 0x47, 0x08, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x98, 0x02, 0x8B, 0xAF, 0x46, 0x02, 0x88, 0x88, 0x88, 0x88, 0x88, 0x98, 0x13, 0x8B, 0xAF, 0x57, 0x13, 0x88, 0x88, 0x88, 0x88, 0x88, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
 | 
				
			||||||
        ),
 | 
					        (0xEA, 0x97, 0x0C, 0x09, 0x09, 0x09, 0x78, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x9F, 0x31, 0x8B, 0xA8, 0x31, 0x75, 0x88, 0x88, 0x88, 0x88, 0x88, 0x9F, 0x20, 0x8B, 0xA8, 0x20, 0x64, 0x88, 0x88, 0x88, 0x88, 0x88, 0x23, 0x00, 0x00, 0x02, 0x71, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x80, 0x81, 0x00, 0x00, 0x00, 0x00),
 | 
				
			||||||
        (
 | 
					        (0xEF, 0xFF, 0xFF, 0x01),
 | 
				
			||||||
            0xE0, 0x00, 0x0A, 0x0F, 0x29, 0x3B, 0x3F, 0x42, 0x39, 0x06, 0x0D, 0x10, 0x13, 0x15, 0x14,
 | 
					        (0x11, 0x00),
 | 
				
			||||||
            0x15, 0x10, 0x17, 0x00, 0x0A, 0x0F, 0x29, 0x3B, 0x3F, 0x42, 0x39, 0x06, 0x0D, 0x10, 0x13,
 | 
					        (0x29, 0x00),
 | 
				
			||||||
            0x15, 0x14, 0x15, 0x10, 0x17,
 | 
					 | 
				
			||||||
        ),
 | 
					 | 
				
			||||||
    ],
 | 
					    ],
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -37,6 +37,7 @@ from esphome.const import (
 | 
				
			|||||||
    CONF_DATA_RATE,
 | 
					    CONF_DATA_RATE,
 | 
				
			||||||
    CONF_DC_PIN,
 | 
					    CONF_DC_PIN,
 | 
				
			||||||
    CONF_DIMENSIONS,
 | 
					    CONF_DIMENSIONS,
 | 
				
			||||||
 | 
					    CONF_DISABLED,
 | 
				
			||||||
    CONF_ENABLE_PIN,
 | 
					    CONF_ENABLE_PIN,
 | 
				
			||||||
    CONF_ID,
 | 
					    CONF_ID,
 | 
				
			||||||
    CONF_INIT_SEQUENCE,
 | 
					    CONF_INIT_SEQUENCE,
 | 
				
			||||||
@@ -146,12 +147,15 @@ def swap_xy_schema(model):
 | 
				
			|||||||
def model_schema(config):
 | 
					def model_schema(config):
 | 
				
			||||||
    model = MODELS[config[CONF_MODEL]]
 | 
					    model = MODELS[config[CONF_MODEL]]
 | 
				
			||||||
    bus_mode = config[CONF_BUS_MODE]
 | 
					    bus_mode = config[CONF_BUS_MODE]
 | 
				
			||||||
    transform = cv.Schema(
 | 
					    transform = cv.Any(
 | 
				
			||||||
        {
 | 
					        cv.Schema(
 | 
				
			||||||
            cv.Required(CONF_MIRROR_X): cv.boolean,
 | 
					            {
 | 
				
			||||||
            cv.Required(CONF_MIRROR_Y): cv.boolean,
 | 
					                cv.Required(CONF_MIRROR_X): cv.boolean,
 | 
				
			||||||
            **swap_xy_schema(model),
 | 
					                cv.Required(CONF_MIRROR_Y): cv.boolean,
 | 
				
			||||||
        }
 | 
					                **swap_xy_schema(model),
 | 
				
			||||||
 | 
					            }
 | 
				
			||||||
 | 
					        ),
 | 
				
			||||||
 | 
					        cv.one_of(CONF_DISABLED, lower=True),
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
    # CUSTOM model will need to provide a custom init sequence
 | 
					    # CUSTOM model will need to provide a custom init sequence
 | 
				
			||||||
    iseqconf = (
 | 
					    iseqconf = (
 | 
				
			||||||
@@ -160,7 +164,11 @@ def model_schema(config):
 | 
				
			|||||||
        else cv.Optional(CONF_INIT_SEQUENCE)
 | 
					        else cv.Optional(CONF_INIT_SEQUENCE)
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
    # Dimensions are optional if the model has a default width and the x-y transform is not overridden
 | 
					    # Dimensions are optional if the model has a default width and the x-y transform is not overridden
 | 
				
			||||||
    is_swapped = config.get(CONF_TRANSFORM, {}).get(CONF_SWAP_XY) is True
 | 
					    transform_config = config.get(CONF_TRANSFORM, {})
 | 
				
			||||||
 | 
					    is_swapped = (
 | 
				
			||||||
 | 
					        isinstance(transform_config, dict)
 | 
				
			||||||
 | 
					        and transform_config.get(CONF_SWAP_XY, False) is True
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
    cv_dimensions = (
 | 
					    cv_dimensions = (
 | 
				
			||||||
        cv.Optional if model.get_default(CONF_WIDTH) and not is_swapped else cv.Required
 | 
					        cv.Optional if model.get_default(CONF_WIDTH) and not is_swapped else cv.Required
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
@@ -192,9 +200,7 @@ def model_schema(config):
 | 
				
			|||||||
        .extend(
 | 
					        .extend(
 | 
				
			||||||
            {
 | 
					            {
 | 
				
			||||||
                cv.GenerateID(): cv.declare_id(MipiSpi),
 | 
					                cv.GenerateID(): cv.declare_id(MipiSpi),
 | 
				
			||||||
                cv_dimensions(CONF_DIMENSIONS): dimension_schema(
 | 
					                cv_dimensions(CONF_DIMENSIONS): dimension_schema(1),
 | 
				
			||||||
                    model.get_default(CONF_DRAW_ROUNDING, 1)
 | 
					 | 
				
			||||||
                ),
 | 
					 | 
				
			||||||
                model.option(CONF_ENABLE_PIN, cv.UNDEFINED): cv.ensure_list(
 | 
					                model.option(CONF_ENABLE_PIN, cv.UNDEFINED): cv.ensure_list(
 | 
				
			||||||
                    pins.gpio_output_pin_schema
 | 
					                    pins.gpio_output_pin_schema
 | 
				
			||||||
                ),
 | 
					                ),
 | 
				
			||||||
@@ -400,6 +406,7 @@ def get_instance(config):
 | 
				
			|||||||
                offset_height,
 | 
					                offset_height,
 | 
				
			||||||
                DISPLAY_ROTATIONS[rotation],
 | 
					                DISPLAY_ROTATIONS[rotation],
 | 
				
			||||||
                frac,
 | 
					                frac,
 | 
				
			||||||
 | 
					                config[CONF_DRAW_ROUNDING],
 | 
				
			||||||
            ]
 | 
					            ]
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        return MipiSpiBuffer, templateargs
 | 
					        return MipiSpiBuffer, templateargs
 | 
				
			||||||
@@ -431,7 +438,6 @@ async def to_code(config):
 | 
				
			|||||||
        else:
 | 
					        else:
 | 
				
			||||||
            config[CONF_ROTATION] = 0
 | 
					            config[CONF_ROTATION] = 0
 | 
				
			||||||
    cg.add(var.set_model(config[CONF_MODEL]))
 | 
					    cg.add(var.set_model(config[CONF_MODEL]))
 | 
				
			||||||
    cg.add(var.set_draw_rounding(config[CONF_DRAW_ROUNDING]))
 | 
					 | 
				
			||||||
    if enable_pin := config.get(CONF_ENABLE_PIN):
 | 
					    if enable_pin := config.get(CONF_ENABLE_PIN):
 | 
				
			||||||
        enable = [await cg.gpio_pin_expression(pin) for pin in enable_pin]
 | 
					        enable = [await cg.gpio_pin_expression(pin) for pin in enable_pin]
 | 
				
			||||||
        cg.add(var.set_enable_pins(enable))
 | 
					        cg.add(var.set_enable_pins(enable))
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -38,7 +38,7 @@ static constexpr uint8_t MADCTL_BGR = 0x08;    // Bit 3 Blue-Green-Red pixel ord
 | 
				
			|||||||
static constexpr uint8_t MADCTL_XFLIP = 0x02;  // Mirror the display horizontally
 | 
					static constexpr uint8_t MADCTL_XFLIP = 0x02;  // Mirror the display horizontally
 | 
				
			||||||
static constexpr uint8_t MADCTL_YFLIP = 0x01;  // Mirror the display vertically
 | 
					static constexpr uint8_t MADCTL_YFLIP = 0x01;  // Mirror the display vertically
 | 
				
			||||||
 | 
					
 | 
				
			||||||
static const uint8_t DELAY_FLAG = 0xFF;
 | 
					static constexpr uint8_t DELAY_FLAG = 0xFF;
 | 
				
			||||||
// store a 16 bit value in a buffer, big endian.
 | 
					// store a 16 bit value in a buffer, big endian.
 | 
				
			||||||
static inline void put16_be(uint8_t *buf, uint16_t value) {
 | 
					static inline void put16_be(uint8_t *buf, uint16_t value) {
 | 
				
			||||||
  buf[0] = value >> 8;
 | 
					  buf[0] = value >> 8;
 | 
				
			||||||
@@ -79,7 +79,7 @@ class MipiSpi : public display::Display,
 | 
				
			|||||||
                public spi::SPIDevice<spi::BIT_ORDER_MSB_FIRST, spi::CLOCK_POLARITY_LOW, spi::CLOCK_PHASE_LEADING,
 | 
					                public spi::SPIDevice<spi::BIT_ORDER_MSB_FIRST, spi::CLOCK_POLARITY_LOW, spi::CLOCK_PHASE_LEADING,
 | 
				
			||||||
                                      spi::DATA_RATE_1MHZ> {
 | 
					                                      spi::DATA_RATE_1MHZ> {
 | 
				
			||||||
 public:
 | 
					 public:
 | 
				
			||||||
  MipiSpi() {}
 | 
					  MipiSpi() = default;
 | 
				
			||||||
  void update() override { this->stop_poller(); }
 | 
					  void update() override { this->stop_poller(); }
 | 
				
			||||||
  void draw_pixel_at(int x, int y, Color color) override {}
 | 
					  void draw_pixel_at(int x, int y, Color color) override {}
 | 
				
			||||||
  void set_model(const char *model) { this->model_ = model; }
 | 
					  void set_model(const char *model) { this->model_ = model; }
 | 
				
			||||||
@@ -99,7 +99,6 @@ class MipiSpi : public display::Display,
 | 
				
			|||||||
  int get_width_internal() override { return WIDTH; }
 | 
					  int get_width_internal() override { return WIDTH; }
 | 
				
			||||||
  int get_height_internal() override { return HEIGHT; }
 | 
					  int get_height_internal() override { return HEIGHT; }
 | 
				
			||||||
  void set_init_sequence(const std::vector<uint8_t> &sequence) { this->init_sequence_ = sequence; }
 | 
					  void set_init_sequence(const std::vector<uint8_t> &sequence) { this->init_sequence_ = sequence; }
 | 
				
			||||||
  void set_draw_rounding(unsigned rounding) { this->draw_rounding_ = rounding; }
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
  // reset the display, and write the init sequence
 | 
					  // reset the display, and write the init sequence
 | 
				
			||||||
  void setup() override {
 | 
					  void setup() override {
 | 
				
			||||||
@@ -326,6 +325,7 @@ class MipiSpi : public display::Display,
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
  /**
 | 
					  /**
 | 
				
			||||||
   * Writes a buffer to the display.
 | 
					   * Writes a buffer to the display.
 | 
				
			||||||
 | 
					   * @param ptr The pointer to the pixel data
 | 
				
			||||||
   * @param w Width of each line in bytes
 | 
					   * @param w Width of each line in bytes
 | 
				
			||||||
   * @param h Height of the buffer in rows
 | 
					   * @param h Height of the buffer in rows
 | 
				
			||||||
   * @param pad Padding in bytes after each line
 | 
					   * @param pad Padding in bytes after each line
 | 
				
			||||||
@@ -424,7 +424,6 @@ class MipiSpi : public display::Display,
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
  // other properties set by configuration
 | 
					  // other properties set by configuration
 | 
				
			||||||
  bool invert_colors_{};
 | 
					  bool invert_colors_{};
 | 
				
			||||||
  unsigned draw_rounding_{2};
 | 
					 | 
				
			||||||
  optional<uint8_t> brightness_{};
 | 
					  optional<uint8_t> brightness_{};
 | 
				
			||||||
  const char *model_{"Unknown"};
 | 
					  const char *model_{"Unknown"};
 | 
				
			||||||
  std::vector<uint8_t> init_sequence_{};
 | 
					  std::vector<uint8_t> init_sequence_{};
 | 
				
			||||||
@@ -444,12 +443,20 @@ class MipiSpi : public display::Display,
 | 
				
			|||||||
 * @tparam OFFSET_WIDTH The x-offset of the display in pixels
 | 
					 * @tparam OFFSET_WIDTH The x-offset of the display in pixels
 | 
				
			||||||
 * @tparam OFFSET_HEIGHT The y-offset of the display in pixels
 | 
					 * @tparam OFFSET_HEIGHT The y-offset of the display in pixels
 | 
				
			||||||
 * @tparam FRACTION The fraction of the display size to use for the buffer (e.g. 4 means a 1/4 buffer).
 | 
					 * @tparam FRACTION The fraction of the display size to use for the buffer (e.g. 4 means a 1/4 buffer).
 | 
				
			||||||
 | 
					 * @tparam ROUNDING The alignment requirement for drawing operations (e.g. 2 means that x coordinates must be even)
 | 
				
			||||||
 */
 | 
					 */
 | 
				
			||||||
template<typename BUFFERTYPE, PixelMode BUFFERPIXEL, bool IS_BIG_ENDIAN, PixelMode DISPLAYPIXEL, BusType BUS_TYPE,
 | 
					template<typename BUFFERTYPE, PixelMode BUFFERPIXEL, bool IS_BIG_ENDIAN, PixelMode DISPLAYPIXEL, BusType BUS_TYPE,
 | 
				
			||||||
         int WIDTH, int HEIGHT, int OFFSET_WIDTH, int OFFSET_HEIGHT, display::DisplayRotation ROTATION, int FRACTION>
 | 
					         uint16_t WIDTH, uint16_t HEIGHT, int OFFSET_WIDTH, int OFFSET_HEIGHT, display::DisplayRotation ROTATION,
 | 
				
			||||||
 | 
					         int FRACTION, unsigned ROUNDING>
 | 
				
			||||||
class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DISPLAYPIXEL, BUS_TYPE, WIDTH, HEIGHT,
 | 
					class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DISPLAYPIXEL, BUS_TYPE, WIDTH, HEIGHT,
 | 
				
			||||||
                                     OFFSET_WIDTH, OFFSET_HEIGHT> {
 | 
					                                     OFFSET_WIDTH, OFFSET_HEIGHT> {
 | 
				
			||||||
 public:
 | 
					 public:
 | 
				
			||||||
 | 
					  // these values define the buffer size needed to write in accordance with the chip pixel alignment
 | 
				
			||||||
 | 
					  // requirements. If the required rounding does not divide the width and height, we round up to the next multiple and
 | 
				
			||||||
 | 
					  // ignore the extra columns and rows when drawing, but use them to write to the display.
 | 
				
			||||||
 | 
					  static constexpr unsigned BUFFER_WIDTH = (WIDTH + ROUNDING - 1) / ROUNDING * ROUNDING;
 | 
				
			||||||
 | 
					  static constexpr unsigned BUFFER_HEIGHT = (HEIGHT + ROUNDING - 1) / ROUNDING * ROUNDING;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  MipiSpiBuffer() { this->rotation_ = ROTATION; }
 | 
					  MipiSpiBuffer() { this->rotation_ = ROTATION; }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  void dump_config() override {
 | 
					  void dump_config() override {
 | 
				
			||||||
@@ -461,15 +468,15 @@ class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DIS
 | 
				
			|||||||
                    "  Buffer fraction: 1/%d\n"
 | 
					                    "  Buffer fraction: 1/%d\n"
 | 
				
			||||||
                    "  Buffer bytes: %zu\n"
 | 
					                    "  Buffer bytes: %zu\n"
 | 
				
			||||||
                    "  Draw rounding: %u",
 | 
					                    "  Draw rounding: %u",
 | 
				
			||||||
                    this->rotation_, BUFFERPIXEL * 8, FRACTION, sizeof(BUFFERTYPE) * WIDTH * HEIGHT / FRACTION,
 | 
					                    this->rotation_, BUFFERPIXEL * 8, FRACTION,
 | 
				
			||||||
                    this->draw_rounding_);
 | 
					                    sizeof(BUFFERTYPE) * BUFFER_WIDTH * BUFFER_HEIGHT / FRACTION, ROUNDING);
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  void setup() override {
 | 
					  void setup() override {
 | 
				
			||||||
    MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DISPLAYPIXEL, BUS_TYPE, WIDTH, HEIGHT, OFFSET_WIDTH,
 | 
					    MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DISPLAYPIXEL, BUS_TYPE, WIDTH, HEIGHT, OFFSET_WIDTH,
 | 
				
			||||||
            OFFSET_HEIGHT>::setup();
 | 
					            OFFSET_HEIGHT>::setup();
 | 
				
			||||||
    RAMAllocator<BUFFERTYPE> allocator{};
 | 
					    RAMAllocator<BUFFERTYPE> allocator{};
 | 
				
			||||||
    this->buffer_ = allocator.allocate(WIDTH * HEIGHT / FRACTION);
 | 
					    this->buffer_ = allocator.allocate(BUFFER_WIDTH * BUFFER_HEIGHT / FRACTION);
 | 
				
			||||||
    if (this->buffer_ == nullptr) {
 | 
					    if (this->buffer_ == nullptr) {
 | 
				
			||||||
      this->mark_failed("Buffer allocation failed");
 | 
					      this->mark_failed("Buffer allocation failed");
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
@@ -508,15 +515,14 @@ class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DIS
 | 
				
			|||||||
      esph_log_v(TAG, "x_low %d, y_low %d, x_high %d, y_high %d", this->x_low_, this->y_low_, this->x_high_,
 | 
					      esph_log_v(TAG, "x_low %d, y_low %d, x_high %d, y_high %d", this->x_low_, this->y_low_, this->x_high_,
 | 
				
			||||||
                 this->y_high_);
 | 
					                 this->y_high_);
 | 
				
			||||||
      // Some chips require that the drawing window be aligned on certain boundaries
 | 
					      // Some chips require that the drawing window be aligned on certain boundaries
 | 
				
			||||||
      auto dr = this->draw_rounding_;
 | 
					      this->x_low_ = this->x_low_ / ROUNDING * ROUNDING;
 | 
				
			||||||
      this->x_low_ = this->x_low_ / dr * dr;
 | 
					      this->y_low_ = this->y_low_ / ROUNDING * ROUNDING;
 | 
				
			||||||
      this->y_low_ = this->y_low_ / dr * dr;
 | 
					      this->x_high_ = (this->x_high_ + ROUNDING) / ROUNDING * ROUNDING - 1;
 | 
				
			||||||
      this->x_high_ = (this->x_high_ + dr) / dr * dr - 1;
 | 
					      this->y_high_ = (this->y_high_ + ROUNDING) / ROUNDING * ROUNDING - 1;
 | 
				
			||||||
      this->y_high_ = (this->y_high_ + dr) / dr * dr - 1;
 | 
					 | 
				
			||||||
      int w = this->x_high_ - this->x_low_ + 1;
 | 
					      int w = this->x_high_ - this->x_low_ + 1;
 | 
				
			||||||
      int h = this->y_high_ - this->y_low_ + 1;
 | 
					      int h = this->y_high_ - this->y_low_ + 1;
 | 
				
			||||||
      this->write_to_display_(this->x_low_, this->y_low_, w, h, this->buffer_, this->x_low_,
 | 
					      this->write_to_display_(this->x_low_, this->y_low_, w, h, this->buffer_, this->x_low_,
 | 
				
			||||||
                              this->y_low_ - this->start_line_, WIDTH - w);
 | 
					                              this->y_low_ - this->start_line_, BUFFER_WIDTH - w);
 | 
				
			||||||
      // invalidate watermarks
 | 
					      // invalidate watermarks
 | 
				
			||||||
      this->x_low_ = WIDTH;
 | 
					      this->x_low_ = WIDTH;
 | 
				
			||||||
      this->y_low_ = HEIGHT;
 | 
					      this->y_low_ = HEIGHT;
 | 
				
			||||||
@@ -536,10 +542,10 @@ class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DIS
 | 
				
			|||||||
  void draw_pixel_at(int x, int y, Color color) override {
 | 
					  void draw_pixel_at(int x, int y, Color color) override {
 | 
				
			||||||
    if (!this->get_clipping().inside(x, y))
 | 
					    if (!this->get_clipping().inside(x, y))
 | 
				
			||||||
      return;
 | 
					      return;
 | 
				
			||||||
    rotate_coordinates_(x, y);
 | 
					    rotate_coordinates(x, y);
 | 
				
			||||||
    if (x < 0 || x >= WIDTH || y < this->start_line_ || y >= this->end_line_)
 | 
					    if (x < 0 || x >= WIDTH || y < this->start_line_ || y >= this->end_line_)
 | 
				
			||||||
      return;
 | 
					      return;
 | 
				
			||||||
    this->buffer_[(y - this->start_line_) * WIDTH + x] = convert_color_(color);
 | 
					    this->buffer_[(y - this->start_line_) * BUFFER_WIDTH + x] = convert_color(color);
 | 
				
			||||||
    if (x < this->x_low_) {
 | 
					    if (x < this->x_low_) {
 | 
				
			||||||
      this->x_low_ = x;
 | 
					      this->x_low_ = x;
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
@@ -560,7 +566,7 @@ class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DIS
 | 
				
			|||||||
    this->y_low_ = this->start_line_;
 | 
					    this->y_low_ = this->start_line_;
 | 
				
			||||||
    this->x_high_ = WIDTH - 1;
 | 
					    this->x_high_ = WIDTH - 1;
 | 
				
			||||||
    this->y_high_ = this->end_line_ - 1;
 | 
					    this->y_high_ = this->end_line_ - 1;
 | 
				
			||||||
    std::fill_n(this->buffer_, HEIGHT * WIDTH / FRACTION, convert_color_(color));
 | 
					    std::fill_n(this->buffer_, HEIGHT * BUFFER_WIDTH / FRACTION, convert_color(color));
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  int get_width() override {
 | 
					  int get_width() override {
 | 
				
			||||||
@@ -577,7 +583,7 @@ class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DIS
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
 protected:
 | 
					 protected:
 | 
				
			||||||
  // Rotate the coordinates to match the display orientation.
 | 
					  // Rotate the coordinates to match the display orientation.
 | 
				
			||||||
  void rotate_coordinates_(int &x, int &y) const {
 | 
					  static void rotate_coordinates(int &x, int &y) {
 | 
				
			||||||
    if constexpr (ROTATION == display::DISPLAY_ROTATION_180_DEGREES) {
 | 
					    if constexpr (ROTATION == display::DISPLAY_ROTATION_180_DEGREES) {
 | 
				
			||||||
      x = WIDTH - x - 1;
 | 
					      x = WIDTH - x - 1;
 | 
				
			||||||
      y = HEIGHT - y - 1;
 | 
					      y = HEIGHT - y - 1;
 | 
				
			||||||
@@ -593,7 +599,7 @@ class MipiSpiBuffer : public MipiSpi<BUFFERTYPE, BUFFERPIXEL, IS_BIG_ENDIAN, DIS
 | 
				
			|||||||
  }
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  // Convert a color to the buffer pixel format.
 | 
					  // Convert a color to the buffer pixel format.
 | 
				
			||||||
  BUFFERTYPE convert_color_(Color &color) const {
 | 
					  static BUFFERTYPE convert_color(const Color &color) {
 | 
				
			||||||
    if constexpr (BUFFERPIXEL == PIXEL_MODE_8) {
 | 
					    if constexpr (BUFFERPIXEL == PIXEL_MODE_8) {
 | 
				
			||||||
      return (color.red & 0xE0) | (color.g & 0xE0) >> 3 | color.b >> 6;
 | 
					      return (color.red & 0xE0) | (color.g & 0xE0) >> 3 | color.b >> 6;
 | 
				
			||||||
    } else if constexpr (BUFFERPIXEL == PIXEL_MODE_16) {
 | 
					    } else if constexpr (BUFFERPIXEL == PIXEL_MODE_16) {
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -3,6 +3,7 @@ import esphome.config_validation as cv
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
from .amoled import CO5300
 | 
					from .amoled import CO5300
 | 
				
			||||||
from .ili import ILI9488_A
 | 
					from .ili import ILI9488_A
 | 
				
			||||||
 | 
					from .jc import AXS15231
 | 
				
			||||||
 | 
					
 | 
				
			||||||
DriverChip(
 | 
					DriverChip(
 | 
				
			||||||
    "WAVESHARE-4-TFT",
 | 
					    "WAVESHARE-4-TFT",
 | 
				
			||||||
@@ -152,3 +153,12 @@ CO5300.extend(
 | 
				
			|||||||
    cs_pin=12,
 | 
					    cs_pin=12,
 | 
				
			||||||
    reset_pin=39,
 | 
					    reset_pin=39,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					AXS15231.extend(
 | 
				
			||||||
 | 
					    "WAVESHARE-ESP32-S3-TOUCH-LCD-3.49",
 | 
				
			||||||
 | 
					    width=172,
 | 
				
			||||||
 | 
					    height=640,
 | 
				
			||||||
 | 
					    data_rate="80MHz",
 | 
				
			||||||
 | 
					    cs_pin=9,
 | 
				
			||||||
 | 
					    reset_pin=21,
 | 
				
			||||||
 | 
					)
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -7,7 +7,7 @@
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
#include "opentherm.h"
 | 
					#include "opentherm.h"
 | 
				
			||||||
#include "esphome/core/helpers.h"
 | 
					#include "esphome/core/helpers.h"
 | 
				
			||||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
					#ifdef USE_ESP32
 | 
				
			||||||
#include "driver/timer.h"
 | 
					#include "driver/timer.h"
 | 
				
			||||||
#include "esp_err.h"
 | 
					#include "esp_err.h"
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
@@ -31,7 +31,7 @@ OpenTherm *OpenTherm::instance = nullptr;
 | 
				
			|||||||
OpenTherm::OpenTherm(InternalGPIOPin *in_pin, InternalGPIOPin *out_pin, int32_t device_timeout)
 | 
					OpenTherm::OpenTherm(InternalGPIOPin *in_pin, InternalGPIOPin *out_pin, int32_t device_timeout)
 | 
				
			||||||
    : in_pin_(in_pin),
 | 
					    : in_pin_(in_pin),
 | 
				
			||||||
      out_pin_(out_pin),
 | 
					      out_pin_(out_pin),
 | 
				
			||||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
					#ifdef USE_ESP32
 | 
				
			||||||
      timer_group_(TIMER_GROUP_0),
 | 
					      timer_group_(TIMER_GROUP_0),
 | 
				
			||||||
      timer_idx_(TIMER_0),
 | 
					      timer_idx_(TIMER_0),
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
@@ -57,7 +57,7 @@ bool OpenTherm::initialize() {
 | 
				
			|||||||
  this->out_pin_->setup();
 | 
					  this->out_pin_->setup();
 | 
				
			||||||
  this->out_pin_->digital_write(true);
 | 
					  this->out_pin_->digital_write(true);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
					#ifdef USE_ESP32
 | 
				
			||||||
  return this->init_esp32_timer_();
 | 
					  return this->init_esp32_timer_();
 | 
				
			||||||
#else
 | 
					#else
 | 
				
			||||||
  return true;
 | 
					  return true;
 | 
				
			||||||
@@ -238,7 +238,7 @@ void IRAM_ATTR OpenTherm::write_bit_(uint8_t high, uint8_t clock) {
 | 
				
			|||||||
  }
 | 
					  }
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
					#ifdef USE_ESP32
 | 
				
			||||||
 | 
					
 | 
				
			||||||
bool OpenTherm::init_esp32_timer_() {
 | 
					bool OpenTherm::init_esp32_timer_() {
 | 
				
			||||||
  // Search for a free timer. Maybe unstable, we'll see.
 | 
					  // Search for a free timer. Maybe unstable, we'll see.
 | 
				
			||||||
@@ -365,7 +365,7 @@ void IRAM_ATTR OpenTherm::stop_timer_() {
 | 
				
			|||||||
  }
 | 
					  }
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#endif  // END ESP32
 | 
					#endif  // USE_ESP32
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef ESP8266
 | 
					#ifdef ESP8266
 | 
				
			||||||
// 5 kHz timer_
 | 
					// 5 kHz timer_
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -12,7 +12,7 @@
 | 
				
			|||||||
#include "esphome/core/helpers.h"
 | 
					#include "esphome/core/helpers.h"
 | 
				
			||||||
#include "esphome/core/log.h"
 | 
					#include "esphome/core/log.h"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
					#ifdef USE_ESP32
 | 
				
			||||||
#include "driver/timer.h"
 | 
					#include "driver/timer.h"
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -356,7 +356,7 @@ class OpenTherm {
 | 
				
			|||||||
  ISRInternalGPIOPin isr_in_pin_;
 | 
					  ISRInternalGPIOPin isr_in_pin_;
 | 
				
			||||||
  ISRInternalGPIOPin isr_out_pin_;
 | 
					  ISRInternalGPIOPin isr_out_pin_;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
					#ifdef USE_ESP32
 | 
				
			||||||
  timer_group_t timer_group_;
 | 
					  timer_group_t timer_group_;
 | 
				
			||||||
  timer_idx_t timer_idx_;
 | 
					  timer_idx_t timer_idx_;
 | 
				
			||||||
#endif
 | 
					#endif
 | 
				
			||||||
@@ -370,7 +370,7 @@ class OpenTherm {
 | 
				
			|||||||
  int32_t timeout_counter_;  // <0 no timeout
 | 
					  int32_t timeout_counter_;  // <0 no timeout
 | 
				
			||||||
  int32_t device_timeout_;
 | 
					  int32_t device_timeout_;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#if defined(ESP32) || defined(USE_ESP_IDF)
 | 
					#ifdef USE_ESP32
 | 
				
			||||||
  esp_err_t timer_error_ = ESP_OK;
 | 
					  esp_err_t timer_error_ = ESP_OK;
 | 
				
			||||||
  TimerErrorType timer_error_type_ = TimerErrorType::NO_TIMER_ERROR;
 | 
					  TimerErrorType timer_error_type_ = TimerErrorType::NO_TIMER_ERROR;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -180,10 +180,12 @@ void OpenThreadSrpComponent::setup() {
 | 
				
			|||||||
    entry->mService.mNumTxtEntries = service.txt_records.size();
 | 
					    entry->mService.mNumTxtEntries = service.txt_records.size();
 | 
				
			||||||
    for (size_t i = 0; i < service.txt_records.size(); i++) {
 | 
					    for (size_t i = 0; i < service.txt_records.size(); i++) {
 | 
				
			||||||
      const auto &txt = service.txt_records[i];
 | 
					      const auto &txt = service.txt_records[i];
 | 
				
			||||||
      auto value = const_cast<TemplatableValue<std::string> &>(txt.value).value();
 | 
					      // Value is either a compile-time string literal in flash or a pointer to dynamic_txt_values_
 | 
				
			||||||
 | 
					      // OpenThread SRP client expects the data to persist, so we strdup it
 | 
				
			||||||
 | 
					      const char *value_str = MDNS_STR_ARG(txt.value);
 | 
				
			||||||
      txt_entries[i].mKey = MDNS_STR_ARG(txt.key);
 | 
					      txt_entries[i].mKey = MDNS_STR_ARG(txt.key);
 | 
				
			||||||
      txt_entries[i].mValue = reinterpret_cast<const uint8_t *>(strdup(value.c_str()));
 | 
					      txt_entries[i].mValue = reinterpret_cast<const uint8_t *>(strdup(value_str));
 | 
				
			||||||
      txt_entries[i].mValueLength = value.size();
 | 
					      txt_entries[i].mValueLength = strlen(value_str);
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
    entry->mService.mTxtEntries = txt_entries;
 | 
					    entry->mService.mTxtEntries = txt_entries;
 | 
				
			||||||
    entry->mService.mNumTxtEntries = service.txt_records.size();
 | 
					    entry->mService.mNumTxtEntries = service.txt_records.size();
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -63,6 +63,8 @@ SPIRAM_SPEEDS = {
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def supported() -> bool:
 | 
					def supported() -> bool:
 | 
				
			||||||
 | 
					    if not CORE.is_esp32:
 | 
				
			||||||
 | 
					        return False
 | 
				
			||||||
    variant = get_esp32_variant()
 | 
					    variant = get_esp32_variant()
 | 
				
			||||||
    return variant in SPIRAM_MODES
 | 
					    return variant in SPIRAM_MODES
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -81,7 +81,7 @@ CONFIG_SCHEMA = (
 | 
				
			|||||||
                cv.int_range(min=0, max=0xFFFF, max_included=False),
 | 
					                cv.int_range(min=0, max=0xFFFF, max_included=False),
 | 
				
			||||||
            ),
 | 
					            ),
 | 
				
			||||||
            cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION): cv.pressure,
 | 
					            cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION): cv.pressure,
 | 
				
			||||||
            cv.Optional(CONF_TEMPERATURE_OFFSET, default="4°C"): cv.temperature,
 | 
					            cv.Optional(CONF_TEMPERATURE_OFFSET, default="4°C"): cv.temperature_delta,
 | 
				
			||||||
            cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION_SOURCE): cv.use_id(
 | 
					            cv.Optional(CONF_AMBIENT_PRESSURE_COMPENSATION_SOURCE): cv.use_id(
 | 
				
			||||||
                sensor.Sensor
 | 
					                sensor.Sensor
 | 
				
			||||||
            ),
 | 
					            ),
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -6,7 +6,7 @@ from pathlib import Path
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
from esphome import automation, external_files
 | 
					from esphome import automation, external_files
 | 
				
			||||||
import esphome.codegen as cg
 | 
					import esphome.codegen as cg
 | 
				
			||||||
from esphome.components import audio, esp32, media_player, speaker
 | 
					from esphome.components import audio, esp32, media_player, psram, speaker
 | 
				
			||||||
import esphome.config_validation as cv
 | 
					import esphome.config_validation as cv
 | 
				
			||||||
from esphome.const import (
 | 
					from esphome.const import (
 | 
				
			||||||
    CONF_BUFFER_SIZE,
 | 
					    CONF_BUFFER_SIZE,
 | 
				
			||||||
@@ -26,10 +26,21 @@ from esphome.const import (
 | 
				
			|||||||
from esphome.core import CORE, HexInt
 | 
					from esphome.core import CORE, HexInt
 | 
				
			||||||
from esphome.core.entity_helpers import inherit_property_from
 | 
					from esphome.core.entity_helpers import inherit_property_from
 | 
				
			||||||
from esphome.external_files import download_content
 | 
					from esphome.external_files import download_content
 | 
				
			||||||
 | 
					from esphome.types import ConfigType
 | 
				
			||||||
 | 
					
 | 
				
			||||||
_LOGGER = logging.getLogger(__name__)
 | 
					_LOGGER = logging.getLogger(__name__)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
AUTO_LOAD = ["audio", "psram"]
 | 
					
 | 
				
			||||||
 | 
					def AUTO_LOAD(config: ConfigType) -> list[str]:
 | 
				
			||||||
 | 
					    load = ["audio"]
 | 
				
			||||||
 | 
					    if (
 | 
				
			||||||
 | 
					        not config
 | 
				
			||||||
 | 
					        or config.get(CONF_TASK_STACK_IN_PSRAM)
 | 
				
			||||||
 | 
					        or config.get(CONF_CODEC_SUPPORT_ENABLED)
 | 
				
			||||||
 | 
					    ):
 | 
				
			||||||
 | 
					        return load + ["psram"]
 | 
				
			||||||
 | 
					    return load
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
CODEOWNERS = ["@kahrendt", "@synesthesiam"]
 | 
					CODEOWNERS = ["@kahrendt", "@synesthesiam"]
 | 
				
			||||||
DOMAIN = "media_player"
 | 
					DOMAIN = "media_player"
 | 
				
			||||||
@@ -279,7 +290,9 @@ CONFIG_SCHEMA = cv.All(
 | 
				
			|||||||
            cv.Optional(CONF_BUFFER_SIZE, default=1000000): cv.int_range(
 | 
					            cv.Optional(CONF_BUFFER_SIZE, default=1000000): cv.int_range(
 | 
				
			||||||
                min=4000, max=4000000
 | 
					                min=4000, max=4000000
 | 
				
			||||||
            ),
 | 
					            ),
 | 
				
			||||||
            cv.Optional(CONF_CODEC_SUPPORT_ENABLED, default=True): cv.boolean,
 | 
					            cv.Optional(
 | 
				
			||||||
 | 
					                CONF_CODEC_SUPPORT_ENABLED, default=psram.supported()
 | 
				
			||||||
 | 
					            ): cv.boolean,
 | 
				
			||||||
            cv.Optional(CONF_FILES): cv.ensure_list(MEDIA_FILE_TYPE_SCHEMA),
 | 
					            cv.Optional(CONF_FILES): cv.ensure_list(MEDIA_FILE_TYPE_SCHEMA),
 | 
				
			||||||
            cv.Optional(CONF_TASK_STACK_IN_PSRAM, default=False): cv.boolean,
 | 
					            cv.Optional(CONF_TASK_STACK_IN_PSRAM, default=False): cv.boolean,
 | 
				
			||||||
            cv.Optional(CONF_VOLUME_INCREMENT, default=0.05): cv.percentage,
 | 
					            cv.Optional(CONF_VOLUME_INCREMENT, default=0.05): cv.percentage,
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,7 +1,7 @@
 | 
				
			|||||||
import logging
 | 
					import logging
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from esphome import core
 | 
					from esphome import core
 | 
				
			||||||
from esphome.config_helpers import Extend, Remove, merge_config
 | 
					from esphome.config_helpers import Extend, Remove, merge_config, merge_dicts_ordered
 | 
				
			||||||
import esphome.config_validation as cv
 | 
					import esphome.config_validation as cv
 | 
				
			||||||
from esphome.const import CONF_SUBSTITUTIONS, VALID_SUBSTITUTIONS_CHARACTERS
 | 
					from esphome.const import CONF_SUBSTITUTIONS, VALID_SUBSTITUTIONS_CHARACTERS
 | 
				
			||||||
from esphome.yaml_util import ESPHomeDataBase, ESPLiteralValue, make_data_base
 | 
					from esphome.yaml_util import ESPHomeDataBase, ESPLiteralValue, make_data_base
 | 
				
			||||||
@@ -170,10 +170,10 @@ def do_substitution_pass(config, command_line_substitutions, ignore_missing=Fals
 | 
				
			|||||||
        return
 | 
					        return
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # Merge substitutions in config, overriding with substitutions coming from command line:
 | 
					    # Merge substitutions in config, overriding with substitutions coming from command line:
 | 
				
			||||||
    substitutions = {
 | 
					    # Use merge_dicts_ordered to preserve OrderedDict type for move_to_end()
 | 
				
			||||||
        **config.get(CONF_SUBSTITUTIONS, {}),
 | 
					    substitutions = merge_dicts_ordered(
 | 
				
			||||||
        **(command_line_substitutions or {}),
 | 
					        config.get(CONF_SUBSTITUTIONS, {}), command_line_substitutions or {}
 | 
				
			||||||
    }
 | 
					    )
 | 
				
			||||||
    with cv.prepend_path("substitutions"):
 | 
					    with cv.prepend_path("substitutions"):
 | 
				
			||||||
        if not isinstance(substitutions, dict):
 | 
					        if not isinstance(substitutions, dict):
 | 
				
			||||||
            raise cv.Invalid(
 | 
					            raise cv.Invalid(
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -347,7 +347,7 @@ def final_validate_device_schema(
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
    def validate_pin(opt, device):
 | 
					    def validate_pin(opt, device):
 | 
				
			||||||
        def validator(value):
 | 
					        def validator(value):
 | 
				
			||||||
            if opt in device:
 | 
					            if opt in device and not CORE.testing_mode:
 | 
				
			||||||
                raise cv.Invalid(
 | 
					                raise cv.Invalid(
 | 
				
			||||||
                    f"The uart {opt} is used both by {name} and {device[opt]}, "
 | 
					                    f"The uart {opt} is used both by {name} and {device[opt]}, "
 | 
				
			||||||
                    f"but can only be used by one. Please create a new uart bus for {name}."
 | 
					                    f"but can only be used by one. Please create a new uart bus for {name}."
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -56,6 +56,13 @@ uint32_t ESP8266UartComponent::get_config() {
 | 
				
			|||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
void ESP8266UartComponent::setup() {
 | 
					void ESP8266UartComponent::setup() {
 | 
				
			||||||
 | 
					  if (this->rx_pin_) {
 | 
				
			||||||
 | 
					    this->rx_pin_->setup();
 | 
				
			||||||
 | 
					  }
 | 
				
			||||||
 | 
					  if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
 | 
				
			||||||
 | 
					    this->tx_pin_->setup();
 | 
				
			||||||
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  // Use Arduino HardwareSerial UARTs if all used pins match the ones
 | 
					  // Use Arduino HardwareSerial UARTs if all used pins match the ones
 | 
				
			||||||
  // preconfigured by the platform. For example if RX disabled but TX pin
 | 
					  // preconfigured by the platform. For example if RX disabled but TX pin
 | 
				
			||||||
  // is 1 we still want to use Serial.
 | 
					  // is 1 we still want to use Serial.
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -6,6 +6,9 @@
 | 
				
			|||||||
#include "esphome/core/defines.h"
 | 
					#include "esphome/core/defines.h"
 | 
				
			||||||
#include "esphome/core/helpers.h"
 | 
					#include "esphome/core/helpers.h"
 | 
				
			||||||
#include "esphome/core/log.h"
 | 
					#include "esphome/core/log.h"
 | 
				
			||||||
 | 
					#include "esphome/core/gpio.h"
 | 
				
			||||||
 | 
					#include "driver/gpio.h"
 | 
				
			||||||
 | 
					#include "soc/gpio_num.h"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef USE_LOGGER
 | 
					#ifdef USE_LOGGER
 | 
				
			||||||
#include "esphome/components/logger/logger.h"
 | 
					#include "esphome/components/logger/logger.h"
 | 
				
			||||||
@@ -104,6 +107,13 @@ void IDFUARTComponent::load_settings(bool dump_config) {
 | 
				
			|||||||
    return;
 | 
					    return;
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					  if (this->rx_pin_) {
 | 
				
			||||||
 | 
					    this->rx_pin_->setup();
 | 
				
			||||||
 | 
					  }
 | 
				
			||||||
 | 
					  if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
 | 
				
			||||||
 | 
					    this->tx_pin_->setup();
 | 
				
			||||||
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  int8_t tx = this->tx_pin_ != nullptr ? this->tx_pin_->get_pin() : -1;
 | 
					  int8_t tx = this->tx_pin_ != nullptr ? this->tx_pin_->get_pin() : -1;
 | 
				
			||||||
  int8_t rx = this->rx_pin_ != nullptr ? this->rx_pin_->get_pin() : -1;
 | 
					  int8_t rx = this->rx_pin_ != nullptr ? this->rx_pin_->get_pin() : -1;
 | 
				
			||||||
  int8_t flow_control = this->flow_control_pin_ != nullptr ? this->flow_control_pin_->get_pin() : -1;
 | 
					  int8_t flow_control = this->flow_control_pin_ != nullptr ? this->flow_control_pin_->get_pin() : -1;
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -46,6 +46,13 @@ uint16_t LibreTinyUARTComponent::get_config() {
 | 
				
			|||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
void LibreTinyUARTComponent::setup() {
 | 
					void LibreTinyUARTComponent::setup() {
 | 
				
			||||||
 | 
					  if (this->rx_pin_) {
 | 
				
			||||||
 | 
					    this->rx_pin_->setup();
 | 
				
			||||||
 | 
					  }
 | 
				
			||||||
 | 
					  if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
 | 
				
			||||||
 | 
					    this->tx_pin_->setup();
 | 
				
			||||||
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  int8_t tx_pin = tx_pin_ == nullptr ? -1 : tx_pin_->get_pin();
 | 
					  int8_t tx_pin = tx_pin_ == nullptr ? -1 : tx_pin_->get_pin();
 | 
				
			||||||
  int8_t rx_pin = rx_pin_ == nullptr ? -1 : rx_pin_->get_pin();
 | 
					  int8_t rx_pin = rx_pin_ == nullptr ? -1 : rx_pin_->get_pin();
 | 
				
			||||||
  bool tx_inverted = tx_pin_ != nullptr && tx_pin_->is_inverted();
 | 
					  bool tx_inverted = tx_pin_ != nullptr && tx_pin_->is_inverted();
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -52,6 +52,13 @@ uint16_t RP2040UartComponent::get_config() {
 | 
				
			|||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
void RP2040UartComponent::setup() {
 | 
					void RP2040UartComponent::setup() {
 | 
				
			||||||
 | 
					  if (this->rx_pin_) {
 | 
				
			||||||
 | 
					    this->rx_pin_->setup();
 | 
				
			||||||
 | 
					  }
 | 
				
			||||||
 | 
					  if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
 | 
				
			||||||
 | 
					    this->tx_pin_->setup();
 | 
				
			||||||
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  uint16_t config = get_config();
 | 
					  uint16_t config = get_config();
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  constexpr uint32_t valid_tx_uart_0 = __bitset({0, 12, 16, 28});
 | 
					  constexpr uint32_t valid_tx_uart_0 = __bitset({0, 12, 16, 28});
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -9,6 +9,7 @@ from esphome.components.esp32 import (
 | 
				
			|||||||
import esphome.config_validation as cv
 | 
					import esphome.config_validation as cv
 | 
				
			||||||
from esphome.const import CONF_DEVICES, CONF_ID
 | 
					from esphome.const import CONF_DEVICES, CONF_ID
 | 
				
			||||||
from esphome.cpp_types import Component
 | 
					from esphome.cpp_types import Component
 | 
				
			||||||
 | 
					from esphome.types import ConfigType
 | 
				
			||||||
 | 
					
 | 
				
			||||||
AUTO_LOAD = ["bytebuffer"]
 | 
					AUTO_LOAD = ["bytebuffer"]
 | 
				
			||||||
CODEOWNERS = ["@clydebarrow"]
 | 
					CODEOWNERS = ["@clydebarrow"]
 | 
				
			||||||
@@ -20,6 +21,7 @@ USBClient = usb_host_ns.class_("USBClient", Component)
 | 
				
			|||||||
CONF_VID = "vid"
 | 
					CONF_VID = "vid"
 | 
				
			||||||
CONF_PID = "pid"
 | 
					CONF_PID = "pid"
 | 
				
			||||||
CONF_ENABLE_HUBS = "enable_hubs"
 | 
					CONF_ENABLE_HUBS = "enable_hubs"
 | 
				
			||||||
 | 
					CONF_MAX_TRANSFER_REQUESTS = "max_transfer_requests"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def usb_device_schema(cls=USBClient, vid: int = None, pid: [int] = None) -> cv.Schema:
 | 
					def usb_device_schema(cls=USBClient, vid: int = None, pid: [int] = None) -> cv.Schema:
 | 
				
			||||||
@@ -44,6 +46,9 @@ CONFIG_SCHEMA = cv.All(
 | 
				
			|||||||
        {
 | 
					        {
 | 
				
			||||||
            cv.GenerateID(): cv.declare_id(USBHost),
 | 
					            cv.GenerateID(): cv.declare_id(USBHost),
 | 
				
			||||||
            cv.Optional(CONF_ENABLE_HUBS, default=False): cv.boolean,
 | 
					            cv.Optional(CONF_ENABLE_HUBS, default=False): cv.boolean,
 | 
				
			||||||
 | 
					            cv.Optional(CONF_MAX_TRANSFER_REQUESTS, default=16): cv.int_range(
 | 
				
			||||||
 | 
					                min=1, max=32
 | 
				
			||||||
 | 
					            ),
 | 
				
			||||||
            cv.Optional(CONF_DEVICES): cv.ensure_list(usb_device_schema()),
 | 
					            cv.Optional(CONF_DEVICES): cv.ensure_list(usb_device_schema()),
 | 
				
			||||||
        }
 | 
					        }
 | 
				
			||||||
    ),
 | 
					    ),
 | 
				
			||||||
@@ -58,10 +63,14 @@ async def register_usb_client(config):
 | 
				
			|||||||
    return var
 | 
					    return var
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
async def to_code(config):
 | 
					async def to_code(config: ConfigType) -> None:
 | 
				
			||||||
    add_idf_sdkconfig_option("CONFIG_USB_HOST_CONTROL_TRANSFER_MAX_SIZE", 1024)
 | 
					    add_idf_sdkconfig_option("CONFIG_USB_HOST_CONTROL_TRANSFER_MAX_SIZE", 1024)
 | 
				
			||||||
    if config.get(CONF_ENABLE_HUBS):
 | 
					    if config.get(CONF_ENABLE_HUBS):
 | 
				
			||||||
        add_idf_sdkconfig_option("CONFIG_USB_HOST_HUBS_SUPPORTED", True)
 | 
					        add_idf_sdkconfig_option("CONFIG_USB_HOST_HUBS_SUPPORTED", True)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    max_requests = config[CONF_MAX_TRANSFER_REQUESTS]
 | 
				
			||||||
 | 
					    cg.add_define("USB_HOST_MAX_REQUESTS", max_requests)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    var = cg.new_Pvariable(config[CONF_ID])
 | 
					    var = cg.new_Pvariable(config[CONF_ID])
 | 
				
			||||||
    await cg.register_component(var, config)
 | 
					    await cg.register_component(var, config)
 | 
				
			||||||
    for device in config.get(CONF_DEVICES) or ():
 | 
					    for device in config.get(CONF_DEVICES) or ():
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -2,6 +2,7 @@
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
// Should not be needed, but it's required to pass CI clang-tidy checks
 | 
					// Should not be needed, but it's required to pass CI clang-tidy checks
 | 
				
			||||||
#if defined(USE_ESP32_VARIANT_ESP32S2) || defined(USE_ESP32_VARIANT_ESP32S3) || defined(USE_ESP32_VARIANT_ESP32P4)
 | 
					#if defined(USE_ESP32_VARIANT_ESP32S2) || defined(USE_ESP32_VARIANT_ESP32S3) || defined(USE_ESP32_VARIANT_ESP32P4)
 | 
				
			||||||
 | 
					#include "esphome/core/defines.h"
 | 
				
			||||||
#include "esphome/core/component.h"
 | 
					#include "esphome/core/component.h"
 | 
				
			||||||
#include <vector>
 | 
					#include <vector>
 | 
				
			||||||
#include "usb/usb_host.h"
 | 
					#include "usb/usb_host.h"
 | 
				
			||||||
@@ -16,23 +17,25 @@ namespace usb_host {
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
// THREADING MODEL:
 | 
					// THREADING MODEL:
 | 
				
			||||||
// This component uses a dedicated USB task for event processing to prevent data loss.
 | 
					// This component uses a dedicated USB task for event processing to prevent data loss.
 | 
				
			||||||
// - USB Task (high priority): Handles USB events, executes transfer callbacks
 | 
					// - USB Task (high priority): Handles USB events, executes transfer callbacks, releases transfer slots
 | 
				
			||||||
// - Main Loop Task: Initiates transfers, processes completion events
 | 
					// - Main Loop Task: Initiates transfers, processes device connect/disconnect events
 | 
				
			||||||
//
 | 
					//
 | 
				
			||||||
// Thread-safe communication:
 | 
					// Thread-safe communication:
 | 
				
			||||||
// - Lock-free queues for USB task -> main loop events (SPSC pattern)
 | 
					// - Lock-free queues for USB task -> main loop events (SPSC pattern)
 | 
				
			||||||
// - Lock-free TransferRequest pool using atomic bitmask (MCSP pattern)
 | 
					// - Lock-free TransferRequest pool using atomic bitmask (MCMP pattern - multi-consumer, multi-producer)
 | 
				
			||||||
//
 | 
					//
 | 
				
			||||||
// TransferRequest pool access pattern:
 | 
					// TransferRequest pool access pattern:
 | 
				
			||||||
// - get_trq_() [allocate]: Called from BOTH USB task and main loop threads
 | 
					// - get_trq_() [allocate]: Called from BOTH USB task and main loop threads
 | 
				
			||||||
//   * USB task: via USB UART input callbacks that restart transfers immediately
 | 
					//   * USB task: via USB UART input callbacks that restart transfers immediately
 | 
				
			||||||
//   * Main loop: for output transfers and flow-controlled input restarts
 | 
					//   * Main loop: for output transfers and flow-controlled input restarts
 | 
				
			||||||
// - release_trq() [deallocate]: Called from main loop thread only
 | 
					// - release_trq() [deallocate]: Called from BOTH USB task and main loop threads
 | 
				
			||||||
 | 
					//   * USB task: immediately after transfer callback completes (critical for preventing slot exhaustion)
 | 
				
			||||||
 | 
					//   * Main loop: when transfer submission fails
 | 
				
			||||||
//
 | 
					//
 | 
				
			||||||
// The multi-threaded allocation is intentional for performance:
 | 
					// The multi-threaded allocation/deallocation is intentional for performance:
 | 
				
			||||||
// - USB task can immediately restart input transfers without context switching
 | 
					// - USB task can immediately restart input transfers and release slots without context switching
 | 
				
			||||||
// - Main loop controls backpressure by deciding when to restart after consuming data
 | 
					// - Main loop controls backpressure by deciding when to restart after consuming data
 | 
				
			||||||
// The atomic bitmask ensures thread-safe allocation without mutex blocking.
 | 
					// The atomic bitmask ensures thread-safe allocation/deallocation without mutex blocking.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
static const char *const TAG = "usb_host";
 | 
					static const char *const TAG = "usb_host";
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -52,8 +55,17 @@ static const uint8_t USB_DIR_IN = 1 << 7;
 | 
				
			|||||||
static const uint8_t USB_DIR_OUT = 0;
 | 
					static const uint8_t USB_DIR_OUT = 0;
 | 
				
			||||||
static const size_t SETUP_PACKET_SIZE = 8;
 | 
					static const size_t SETUP_PACKET_SIZE = 8;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
static const size_t MAX_REQUESTS = 16;  // maximum number of outstanding requests possible.
 | 
					static const size_t MAX_REQUESTS = USB_HOST_MAX_REQUESTS;  // maximum number of outstanding requests possible.
 | 
				
			||||||
static_assert(MAX_REQUESTS <= 16, "MAX_REQUESTS must be <= 16 to fit in uint16_t bitmask");
 | 
					static_assert(MAX_REQUESTS >= 1 && MAX_REQUESTS <= 32, "MAX_REQUESTS must be between 1 and 32");
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					// Select appropriate bitmask type for tracking allocation of TransferRequest slots.
 | 
				
			||||||
 | 
					// The bitmask must have at least as many bits as MAX_REQUESTS, so:
 | 
				
			||||||
 | 
					// - Use uint16_t for up to 16 requests (MAX_REQUESTS <= 16)
 | 
				
			||||||
 | 
					// - Use uint32_t for 17-32 requests (MAX_REQUESTS > 16)
 | 
				
			||||||
 | 
					// This is tied to the static_assert above, which enforces MAX_REQUESTS is between 1 and 32.
 | 
				
			||||||
 | 
					// If MAX_REQUESTS is increased above 32, this logic and the static_assert must be updated.
 | 
				
			||||||
 | 
					using trq_bitmask_t = std::conditional<(MAX_REQUESTS <= 16), uint16_t, uint32_t>::type;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
static constexpr size_t USB_EVENT_QUEUE_SIZE = 32;   // Size of event queue between USB task and main loop
 | 
					static constexpr size_t USB_EVENT_QUEUE_SIZE = 32;   // Size of event queue between USB task and main loop
 | 
				
			||||||
static constexpr size_t USB_TASK_STACK_SIZE = 4096;  // Stack size for USB task (same as ESP-IDF USB examples)
 | 
					static constexpr size_t USB_TASK_STACK_SIZE = 4096;  // Stack size for USB task (same as ESP-IDF USB examples)
 | 
				
			||||||
static constexpr UBaseType_t USB_TASK_PRIORITY = 5;  // Higher priority than main loop (tskIDLE_PRIORITY + 5)
 | 
					static constexpr UBaseType_t USB_TASK_PRIORITY = 5;  // Higher priority than main loop (tskIDLE_PRIORITY + 5)
 | 
				
			||||||
@@ -83,8 +95,6 @@ struct TransferRequest {
 | 
				
			|||||||
enum EventType : uint8_t {
 | 
					enum EventType : uint8_t {
 | 
				
			||||||
  EVENT_DEVICE_NEW,
 | 
					  EVENT_DEVICE_NEW,
 | 
				
			||||||
  EVENT_DEVICE_GONE,
 | 
					  EVENT_DEVICE_GONE,
 | 
				
			||||||
  EVENT_TRANSFER_COMPLETE,
 | 
					 | 
				
			||||||
  EVENT_CONTROL_COMPLETE,
 | 
					 | 
				
			||||||
};
 | 
					};
 | 
				
			||||||
 | 
					
 | 
				
			||||||
struct UsbEvent {
 | 
					struct UsbEvent {
 | 
				
			||||||
@@ -96,9 +106,6 @@ struct UsbEvent {
 | 
				
			|||||||
    struct {
 | 
					    struct {
 | 
				
			||||||
      usb_device_handle_t handle;
 | 
					      usb_device_handle_t handle;
 | 
				
			||||||
    } device_gone;
 | 
					    } device_gone;
 | 
				
			||||||
    struct {
 | 
					 | 
				
			||||||
      TransferRequest *trq;
 | 
					 | 
				
			||||||
    } transfer;
 | 
					 | 
				
			||||||
  } data;
 | 
					  } data;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  // Required for EventPool - no cleanup needed for POD types
 | 
					  // Required for EventPool - no cleanup needed for POD types
 | 
				
			||||||
@@ -163,10 +170,9 @@ class USBClient : public Component {
 | 
				
			|||||||
  uint16_t pid_{};
 | 
					  uint16_t pid_{};
 | 
				
			||||||
  // Lock-free pool management using atomic bitmask (no dynamic allocation)
 | 
					  // Lock-free pool management using atomic bitmask (no dynamic allocation)
 | 
				
			||||||
  // Bit i = 1: requests_[i] is in use, Bit i = 0: requests_[i] is available
 | 
					  // Bit i = 1: requests_[i] is in use, Bit i = 0: requests_[i] is available
 | 
				
			||||||
  // Supports multiple concurrent consumers (both threads can allocate)
 | 
					  // Supports multiple concurrent consumers and producers (both threads can allocate/deallocate)
 | 
				
			||||||
  // Single producer for deallocation (main loop only)
 | 
					  // Bitmask type automatically selected: uint16_t for <= 16 slots, uint32_t for 17-32 slots
 | 
				
			||||||
  // Limited to 16 slots by uint16_t size (enforced by static_assert)
 | 
					  std::atomic<trq_bitmask_t> trq_in_use_;
 | 
				
			||||||
  std::atomic<uint16_t> trq_in_use_;
 | 
					 | 
				
			||||||
  TransferRequest requests_[MAX_REQUESTS]{};
 | 
					  TransferRequest requests_[MAX_REQUESTS]{};
 | 
				
			||||||
};
 | 
					};
 | 
				
			||||||
class USBHost : public Component {
 | 
					class USBHost : public Component {
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -228,12 +228,6 @@ void USBClient::loop() {
 | 
				
			|||||||
      case EVENT_DEVICE_GONE:
 | 
					      case EVENT_DEVICE_GONE:
 | 
				
			||||||
        this->on_removed(event->data.device_gone.handle);
 | 
					        this->on_removed(event->data.device_gone.handle);
 | 
				
			||||||
        break;
 | 
					        break;
 | 
				
			||||||
      case EVENT_TRANSFER_COMPLETE:
 | 
					 | 
				
			||||||
      case EVENT_CONTROL_COMPLETE: {
 | 
					 | 
				
			||||||
        auto *trq = event->data.transfer.trq;
 | 
					 | 
				
			||||||
        this->release_trq(trq);
 | 
					 | 
				
			||||||
        break;
 | 
					 | 
				
			||||||
      }
 | 
					 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
    // Return event to pool for reuse
 | 
					    // Return event to pool for reuse
 | 
				
			||||||
    this->event_pool.release(event);
 | 
					    this->event_pool.release(event);
 | 
				
			||||||
@@ -313,25 +307,6 @@ void USBClient::on_removed(usb_device_handle_t handle) {
 | 
				
			|||||||
  }
 | 
					  }
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
// Helper to queue transfer cleanup to main loop
 | 
					 | 
				
			||||||
static void queue_transfer_cleanup(TransferRequest *trq, EventType type) {
 | 
					 | 
				
			||||||
  auto *client = trq->client;
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  // Allocate event from pool
 | 
					 | 
				
			||||||
  UsbEvent *event = client->event_pool.allocate();
 | 
					 | 
				
			||||||
  if (event == nullptr) {
 | 
					 | 
				
			||||||
    // No events available - increment counter for periodic logging
 | 
					 | 
				
			||||||
    client->event_queue.increment_dropped_count();
 | 
					 | 
				
			||||||
    return;
 | 
					 | 
				
			||||||
  }
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  event->type = type;
 | 
					 | 
				
			||||||
  event->data.transfer.trq = trq;
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  // Push to lock-free queue (always succeeds since pool size == queue size)
 | 
					 | 
				
			||||||
  client->event_queue.push(event);
 | 
					 | 
				
			||||||
}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
// CALLBACK CONTEXT: USB task (called from usb_host_client_handle_events in USB task)
 | 
					// CALLBACK CONTEXT: USB task (called from usb_host_client_handle_events in USB task)
 | 
				
			||||||
static void control_callback(const usb_transfer_t *xfer) {
 | 
					static void control_callback(const usb_transfer_t *xfer) {
 | 
				
			||||||
  auto *trq = static_cast<TransferRequest *>(xfer->context);
 | 
					  auto *trq = static_cast<TransferRequest *>(xfer->context);
 | 
				
			||||||
@@ -346,8 +321,9 @@ static void control_callback(const usb_transfer_t *xfer) {
 | 
				
			|||||||
    trq->callback(trq->status);
 | 
					    trq->callback(trq->status);
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  // Queue cleanup to main loop
 | 
					  // Release transfer slot immediately in USB task
 | 
				
			||||||
  queue_transfer_cleanup(trq, EVENT_CONTROL_COMPLETE);
 | 
					  // The release_trq() uses thread-safe atomic operations
 | 
				
			||||||
 | 
					  trq->client->release_trq(trq);
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
// THREAD CONTEXT: Called from both USB task and main loop threads (multi-consumer)
 | 
					// THREAD CONTEXT: Called from both USB task and main loop threads (multi-consumer)
 | 
				
			||||||
@@ -358,20 +334,20 @@ static void control_callback(const usb_transfer_t *xfer) {
 | 
				
			|||||||
// This multi-threaded access is intentional for performance - USB task can
 | 
					// This multi-threaded access is intentional for performance - USB task can
 | 
				
			||||||
// immediately restart transfers without waiting for main loop scheduling.
 | 
					// immediately restart transfers without waiting for main loop scheduling.
 | 
				
			||||||
TransferRequest *USBClient::get_trq_() {
 | 
					TransferRequest *USBClient::get_trq_() {
 | 
				
			||||||
  uint16_t mask = this->trq_in_use_.load(std::memory_order_relaxed);
 | 
					  trq_bitmask_t mask = this->trq_in_use_.load(std::memory_order_relaxed);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  // Find first available slot (bit = 0) and try to claim it atomically
 | 
					  // Find first available slot (bit = 0) and try to claim it atomically
 | 
				
			||||||
  // We use a while loop to allow retrying the same slot after CAS failure
 | 
					  // We use a while loop to allow retrying the same slot after CAS failure
 | 
				
			||||||
  size_t i = 0;
 | 
					  size_t i = 0;
 | 
				
			||||||
  while (i != MAX_REQUESTS) {
 | 
					  while (i != MAX_REQUESTS) {
 | 
				
			||||||
    if (mask & (1U << i)) {
 | 
					    if (mask & (static_cast<trq_bitmask_t>(1) << i)) {
 | 
				
			||||||
      // Slot is in use, move to next slot
 | 
					      // Slot is in use, move to next slot
 | 
				
			||||||
      i++;
 | 
					      i++;
 | 
				
			||||||
      continue;
 | 
					      continue;
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    // Slot i appears available, try to claim it atomically
 | 
					    // Slot i appears available, try to claim it atomically
 | 
				
			||||||
    uint16_t desired = mask | (1U << i);  // Set bit i to mark as in-use
 | 
					    trq_bitmask_t desired = mask | (static_cast<trq_bitmask_t>(1) << i);  // Set bit i to mark as in-use
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    if (this->trq_in_use_.compare_exchange_weak(mask, desired, std::memory_order_acquire, std::memory_order_relaxed)) {
 | 
					    if (this->trq_in_use_.compare_exchange_weak(mask, desired, std::memory_order_acquire, std::memory_order_relaxed)) {
 | 
				
			||||||
      // Successfully claimed slot i - prepare the TransferRequest
 | 
					      // Successfully claimed slot i - prepare the TransferRequest
 | 
				
			||||||
@@ -386,7 +362,7 @@ TransferRequest *USBClient::get_trq_() {
 | 
				
			|||||||
    i = 0;
 | 
					    i = 0;
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  ESP_LOGE(TAG, "All %d transfer slots in use", MAX_REQUESTS);
 | 
					  ESP_LOGE(TAG, "All %zu transfer slots in use", MAX_REQUESTS);
 | 
				
			||||||
  return nullptr;
 | 
					  return nullptr;
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
void USBClient::disconnect() {
 | 
					void USBClient::disconnect() {
 | 
				
			||||||
@@ -452,8 +428,11 @@ static void transfer_callback(usb_transfer_t *xfer) {
 | 
				
			|||||||
    trq->callback(trq->status);
 | 
					    trq->callback(trq->status);
 | 
				
			||||||
  }
 | 
					  }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  // Queue cleanup to main loop
 | 
					  // Release transfer slot AFTER callback completes to prevent slot exhaustion
 | 
				
			||||||
  queue_transfer_cleanup(trq, EVENT_TRANSFER_COMPLETE);
 | 
					  // This is critical for high-throughput transfers (e.g., USB UART at 115200 baud)
 | 
				
			||||||
 | 
					  // The callback has finished accessing xfer->data_buffer, so it's safe to release
 | 
				
			||||||
 | 
					  // The release_trq() uses thread-safe atomic operations
 | 
				
			||||||
 | 
					  trq->client->release_trq(trq);
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
/**
 | 
					/**
 | 
				
			||||||
 * Performs a transfer input operation.
 | 
					 * Performs a transfer input operation.
 | 
				
			||||||
@@ -521,12 +500,12 @@ void USBClient::dump_config() {
 | 
				
			|||||||
                "  Product id %04X",
 | 
					                "  Product id %04X",
 | 
				
			||||||
                this->vid_, this->pid_);
 | 
					                this->vid_, this->pid_);
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
// THREAD CONTEXT: Only called from main loop thread (single producer for deallocation)
 | 
					// THREAD CONTEXT: Called from both USB task and main loop threads
 | 
				
			||||||
// - Via event processing when handling EVENT_TRANSFER_COMPLETE/EVENT_CONTROL_COMPLETE
 | 
					// - USB task: Immediately after transfer callback completes
 | 
				
			||||||
// - Directly when transfer submission fails
 | 
					// - Main loop: When transfer submission fails
 | 
				
			||||||
//
 | 
					//
 | 
				
			||||||
// THREAD SAFETY: Lock-free using atomic AND to clear bit
 | 
					// THREAD SAFETY: Lock-free using atomic AND to clear bit
 | 
				
			||||||
// Single-producer pattern makes this simpler than allocation
 | 
					// Thread-safe atomic operation allows multi-threaded deallocation
 | 
				
			||||||
void USBClient::release_trq(TransferRequest *trq) {
 | 
					void USBClient::release_trq(TransferRequest *trq) {
 | 
				
			||||||
  if (trq == nullptr)
 | 
					  if (trq == nullptr)
 | 
				
			||||||
    return;
 | 
					    return;
 | 
				
			||||||
@@ -540,8 +519,8 @@ void USBClient::release_trq(TransferRequest *trq) {
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
  // Atomically clear bit i to mark slot as available
 | 
					  // Atomically clear bit i to mark slot as available
 | 
				
			||||||
  // fetch_and with inverted bitmask clears the bit atomically
 | 
					  // fetch_and with inverted bitmask clears the bit atomically
 | 
				
			||||||
  uint16_t bit = 1U << index;
 | 
					  trq_bitmask_t bit = static_cast<trq_bitmask_t>(1) << index;
 | 
				
			||||||
  this->trq_in_use_.fetch_and(static_cast<uint16_t>(~bit), std::memory_order_release);
 | 
					  this->trq_in_use_.fetch_and(static_cast<trq_bitmask_t>(~bit), std::memory_order_release);
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
}  // namespace usb_host
 | 
					}  // namespace usb_host
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -402,8 +402,8 @@ async def to_code(config):
 | 
				
			|||||||
        add_idf_sdkconfig_option("CONFIG_LWIP_DHCPS", False)
 | 
					        add_idf_sdkconfig_option("CONFIG_LWIP_DHCPS", False)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # Disable Enterprise WiFi support if no EAP is configured
 | 
					    # Disable Enterprise WiFi support if no EAP is configured
 | 
				
			||||||
    if CORE.is_esp32 and not has_eap:
 | 
					    if CORE.is_esp32:
 | 
				
			||||||
        add_idf_sdkconfig_option("CONFIG_ESP_WIFI_ENTERPRISE_SUPPORT", False)
 | 
					        add_idf_sdkconfig_option("CONFIG_ESP_WIFI_ENTERPRISE_SUPPORT", has_eap)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    cg.add(var.set_reboot_timeout(config[CONF_REBOOT_TIMEOUT]))
 | 
					    cg.add(var.set_reboot_timeout(config[CONF_REBOOT_TIMEOUT]))
 | 
				
			||||||
    cg.add(var.set_power_save_mode(config[CONF_POWER_SAVE_MODE]))
 | 
					    cg.add(var.set_power_save_mode(config[CONF_POWER_SAVE_MODE]))
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -576,8 +576,9 @@ __attribute__((noinline)) static void log_scan_result(const WiFiScanResult &res)
 | 
				
			|||||||
  format_mac_addr_upper(bssid.data(), bssid_s);
 | 
					  format_mac_addr_upper(bssid.data(), bssid_s);
 | 
				
			||||||
 | 
					
 | 
				
			||||||
  if (res.get_matches()) {
 | 
					  if (res.get_matches()) {
 | 
				
			||||||
    ESP_LOGI(TAG, "- '%s' %s" LOG_SECRET("(%s) ") "%s", res.get_ssid().c_str(), res.get_is_hidden() ? "(HIDDEN) " : "",
 | 
					    ESP_LOGI(TAG, "- '%s' %s" LOG_SECRET("(%s) ") "%s", res.get_ssid().c_str(),
 | 
				
			||||||
             bssid_s, LOG_STR_ARG(get_signal_bars(res.get_rssi())));
 | 
					             res.get_is_hidden() ? LOG_STR_LITERAL("(HIDDEN) ") : LOG_STR_LITERAL(""), bssid_s,
 | 
				
			||||||
 | 
					             LOG_STR_ARG(get_signal_bars(res.get_rssi())));
 | 
				
			||||||
    ESP_LOGD(TAG,
 | 
					    ESP_LOGD(TAG,
 | 
				
			||||||
             "    Channel: %u\n"
 | 
					             "    Channel: %u\n"
 | 
				
			||||||
             "    RSSI: %d dB",
 | 
					             "    RSSI: %d dB",
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -12,7 +12,7 @@ from typing import Any
 | 
				
			|||||||
import voluptuous as vol
 | 
					import voluptuous as vol
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from esphome import core, loader, pins, yaml_util
 | 
					from esphome import core, loader, pins, yaml_util
 | 
				
			||||||
from esphome.config_helpers import Extend, Remove
 | 
					from esphome.config_helpers import Extend, Remove, merge_dicts_ordered
 | 
				
			||||||
import esphome.config_validation as cv
 | 
					import esphome.config_validation as cv
 | 
				
			||||||
from esphome.const import (
 | 
					from esphome.const import (
 | 
				
			||||||
    CONF_ESPHOME,
 | 
					    CONF_ESPHOME,
 | 
				
			||||||
@@ -922,10 +922,9 @@ def validate_config(
 | 
				
			|||||||
    if CONF_SUBSTITUTIONS in config or command_line_substitutions:
 | 
					    if CONF_SUBSTITUTIONS in config or command_line_substitutions:
 | 
				
			||||||
        from esphome.components import substitutions
 | 
					        from esphome.components import substitutions
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        result[CONF_SUBSTITUTIONS] = {
 | 
					        result[CONF_SUBSTITUTIONS] = merge_dicts_ordered(
 | 
				
			||||||
            **(config.get(CONF_SUBSTITUTIONS) or {}),
 | 
					            config.get(CONF_SUBSTITUTIONS) or {}, command_line_substitutions
 | 
				
			||||||
            **command_line_substitutions,
 | 
					        )
 | 
				
			||||||
        }
 | 
					 | 
				
			||||||
        result.add_output_path([CONF_SUBSTITUTIONS], CONF_SUBSTITUTIONS)
 | 
					        result.add_output_path([CONF_SUBSTITUTIONS], CONF_SUBSTITUTIONS)
 | 
				
			||||||
        try:
 | 
					        try:
 | 
				
			||||||
            substitutions.do_substitution_pass(config, command_line_substitutions)
 | 
					            substitutions.do_substitution_pass(config, command_line_substitutions)
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -10,6 +10,7 @@ from esphome.const import (
 | 
				
			|||||||
    PlatformFramework,
 | 
					    PlatformFramework,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
from esphome.core import CORE
 | 
					from esphome.core import CORE
 | 
				
			||||||
 | 
					from esphome.util import OrderedDict
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# Pre-build lookup map from (platform, framework) tuples to PlatformFramework enum
 | 
					# Pre-build lookup map from (platform, framework) tuples to PlatformFramework enum
 | 
				
			||||||
_PLATFORM_FRAMEWORK_LOOKUP = {
 | 
					_PLATFORM_FRAMEWORK_LOOKUP = {
 | 
				
			||||||
@@ -17,6 +18,25 @@ _PLATFORM_FRAMEWORK_LOOKUP = {
 | 
				
			|||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def merge_dicts_ordered(*dicts: dict) -> OrderedDict:
 | 
				
			||||||
 | 
					    """Merge multiple dicts into an OrderedDict, preserving key order.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    This is a helper to ensure that dictionary merging preserves OrderedDict type,
 | 
				
			||||||
 | 
					    which is important for operations like move_to_end().
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        *dicts: Variable number of dictionaries to merge (later dicts override earlier ones)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        OrderedDict with merged contents
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    result = OrderedDict()
 | 
				
			||||||
 | 
					    for d in dicts:
 | 
				
			||||||
 | 
					        if d:
 | 
				
			||||||
 | 
					            result.update(d)
 | 
				
			||||||
 | 
					    return result
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class Extend:
 | 
					class Extend:
 | 
				
			||||||
    def __init__(self, value):
 | 
					    def __init__(self, value):
 | 
				
			||||||
        self.value = value
 | 
					        self.value = value
 | 
				
			||||||
@@ -60,7 +80,11 @@ def merge_config(full_old, full_new):
 | 
				
			|||||||
        if isinstance(new, dict):
 | 
					        if isinstance(new, dict):
 | 
				
			||||||
            if not isinstance(old, dict):
 | 
					            if not isinstance(old, dict):
 | 
				
			||||||
                return new
 | 
					                return new
 | 
				
			||||||
            res = old.copy()
 | 
					            # Preserve OrderedDict type by copying to OrderedDict if either input is OrderedDict
 | 
				
			||||||
 | 
					            if isinstance(old, OrderedDict) or isinstance(new, OrderedDict):
 | 
				
			||||||
 | 
					                res = OrderedDict(old)
 | 
				
			||||||
 | 
					            else:
 | 
				
			||||||
 | 
					                res = old.copy()
 | 
				
			||||||
            for k, v in new.items():
 | 
					            for k, v in new.items():
 | 
				
			||||||
                if isinstance(v, Remove) and k in old:
 | 
					                if isinstance(v, Remove) and k in old:
 | 
				
			||||||
                    del res[k]
 | 
					                    del res[k]
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -244,6 +244,20 @@ RESERVED_IDS = [
 | 
				
			|||||||
    "uart0",
 | 
					    "uart0",
 | 
				
			||||||
    "uart1",
 | 
					    "uart1",
 | 
				
			||||||
    "uart2",
 | 
					    "uart2",
 | 
				
			||||||
 | 
					    # ESP32 ROM functions
 | 
				
			||||||
 | 
					    "crc16_be",
 | 
				
			||||||
 | 
					    "crc16_le",
 | 
				
			||||||
 | 
					    "crc32_be",
 | 
				
			||||||
 | 
					    "crc32_le",
 | 
				
			||||||
 | 
					    "crc8_be",
 | 
				
			||||||
 | 
					    "crc8_le",
 | 
				
			||||||
 | 
					    "dbg_state",
 | 
				
			||||||
 | 
					    "debug_timer",
 | 
				
			||||||
 | 
					    "one_bits",
 | 
				
			||||||
 | 
					    "recv_packet",
 | 
				
			||||||
 | 
					    "send_packet",
 | 
				
			||||||
 | 
					    "check_pos",
 | 
				
			||||||
 | 
					    "software_reset",
 | 
				
			||||||
]
 | 
					]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -4,7 +4,7 @@ from enum import Enum
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
from esphome.enum import StrEnum
 | 
					from esphome.enum import StrEnum
 | 
				
			||||||
 | 
					
 | 
				
			||||||
__version__ = "2025.10.0b1"
 | 
					__version__ = "2025.10.3"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
 | 
					ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
 | 
				
			||||||
VALID_SUBSTITUTIONS_CHARACTERS = (
 | 
					VALID_SUBSTITUTIONS_CHARACTERS = (
 | 
				
			||||||
@@ -696,6 +696,7 @@ CONF_OPEN_DRAIN = "open_drain"
 | 
				
			|||||||
CONF_OPEN_DRAIN_INTERRUPT = "open_drain_interrupt"
 | 
					CONF_OPEN_DRAIN_INTERRUPT = "open_drain_interrupt"
 | 
				
			||||||
CONF_OPEN_DURATION = "open_duration"
 | 
					CONF_OPEN_DURATION = "open_duration"
 | 
				
			||||||
CONF_OPEN_ENDSTOP = "open_endstop"
 | 
					CONF_OPEN_ENDSTOP = "open_endstop"
 | 
				
			||||||
 | 
					CONF_OPENTHREAD = "openthread"
 | 
				
			||||||
CONF_OPERATION = "operation"
 | 
					CONF_OPERATION = "operation"
 | 
				
			||||||
CONF_OPTIMISTIC = "optimistic"
 | 
					CONF_OPTIMISTIC = "optimistic"
 | 
				
			||||||
CONF_OPTION = "option"
 | 
					CONF_OPTION = "option"
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -11,6 +11,7 @@ from esphome.const import (
 | 
				
			|||||||
    CONF_COMMENT,
 | 
					    CONF_COMMENT,
 | 
				
			||||||
    CONF_ESPHOME,
 | 
					    CONF_ESPHOME,
 | 
				
			||||||
    CONF_ETHERNET,
 | 
					    CONF_ETHERNET,
 | 
				
			||||||
 | 
					    CONF_OPENTHREAD,
 | 
				
			||||||
    CONF_PORT,
 | 
					    CONF_PORT,
 | 
				
			||||||
    CONF_USE_ADDRESS,
 | 
					    CONF_USE_ADDRESS,
 | 
				
			||||||
    CONF_WEB_SERVER,
 | 
					    CONF_WEB_SERVER,
 | 
				
			||||||
@@ -529,6 +530,8 @@ class EsphomeCore:
 | 
				
			|||||||
        self.dashboard = False
 | 
					        self.dashboard = False
 | 
				
			||||||
        # True if command is run from vscode api
 | 
					        # True if command is run from vscode api
 | 
				
			||||||
        self.vscode = False
 | 
					        self.vscode = False
 | 
				
			||||||
 | 
					        # True if running in testing mode (disables validation checks for grouped testing)
 | 
				
			||||||
 | 
					        self.testing_mode = False
 | 
				
			||||||
        # The name of the node
 | 
					        # The name of the node
 | 
				
			||||||
        self.name: str | None = None
 | 
					        self.name: str | None = None
 | 
				
			||||||
        # The friendly name of the node
 | 
					        # The friendly name of the node
 | 
				
			||||||
@@ -639,6 +642,9 @@ class EsphomeCore:
 | 
				
			|||||||
        if CONF_ETHERNET in self.config:
 | 
					        if CONF_ETHERNET in self.config:
 | 
				
			||||||
            return self.config[CONF_ETHERNET][CONF_USE_ADDRESS]
 | 
					            return self.config[CONF_ETHERNET][CONF_USE_ADDRESS]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if CONF_OPENTHREAD in self.config:
 | 
				
			||||||
 | 
					            return f"{self.name}.local"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return None
 | 
					        return None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @property
 | 
					    @property
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -84,6 +84,7 @@
 | 
				
			|||||||
#define USE_LVGL_TOUCHSCREEN
 | 
					#define USE_LVGL_TOUCHSCREEN
 | 
				
			||||||
#define USE_MDNS
 | 
					#define USE_MDNS
 | 
				
			||||||
#define MDNS_SERVICE_COUNT 3
 | 
					#define MDNS_SERVICE_COUNT 3
 | 
				
			||||||
 | 
					#define MDNS_DYNAMIC_TXT_COUNT 3
 | 
				
			||||||
#define USE_MEDIA_PLAYER
 | 
					#define USE_MEDIA_PLAYER
 | 
				
			||||||
#define USE_NEXTION_TFT_UPLOAD
 | 
					#define USE_NEXTION_TFT_UPLOAD
 | 
				
			||||||
#define USE_NUMBER
 | 
					#define USE_NUMBER
 | 
				
			||||||
@@ -190,6 +191,7 @@
 | 
				
			|||||||
#define USE_WEBSERVER_PORT 80  // NOLINT
 | 
					#define USE_WEBSERVER_PORT 80  // NOLINT
 | 
				
			||||||
#define USE_WEBSERVER_SORTING
 | 
					#define USE_WEBSERVER_SORTING
 | 
				
			||||||
#define USE_WIFI_11KV_SUPPORT
 | 
					#define USE_WIFI_11KV_SUPPORT
 | 
				
			||||||
 | 
					#define USB_HOST_MAX_REQUESTS 16
 | 
				
			||||||
 | 
					
 | 
				
			||||||
#ifdef USE_ARDUINO
 | 
					#ifdef USE_ARDUINO
 | 
				
			||||||
#define USE_ARDUINO_VERSION_CODE VERSION_CODE(3, 2, 1)
 | 
					#define USE_ARDUINO_VERSION_CODE VERSION_CODE(3, 2, 1)
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -246,12 +246,15 @@ def entity_duplicate_validator(platform: str) -> Callable[[ConfigType], ConfigTy
 | 
				
			|||||||
                    "\n          to distinguish them"
 | 
					                    "\n          to distinguish them"
 | 
				
			||||||
                )
 | 
					                )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            raise cv.Invalid(
 | 
					            # Skip duplicate entity name validation when testing_mode is enabled
 | 
				
			||||||
                f"Duplicate {platform} entity with name '{entity_name}' found{device_prefix}. "
 | 
					            # This flag is used for grouped component testing
 | 
				
			||||||
                f"{conflict_msg}. "
 | 
					            if not CORE.testing_mode:
 | 
				
			||||||
                "Each entity on a device must have a unique name within its platform."
 | 
					                raise cv.Invalid(
 | 
				
			||||||
                f"{sanitized_msg}"
 | 
					                    f"Duplicate {platform} entity with name '{entity_name}' found{device_prefix}. "
 | 
				
			||||||
            )
 | 
					                    f"{conflict_msg}. "
 | 
				
			||||||
 | 
					                    "Each entity on a device must have a unique name within its platform."
 | 
				
			||||||
 | 
					                    f"{sanitized_msg}"
 | 
				
			||||||
 | 
					                )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # Store metadata about this entity
 | 
					        # Store metadata about this entity
 | 
				
			||||||
        entity_metadata: EntityMetadata = {
 | 
					        entity_metadata: EntityMetadata = {
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -10,6 +10,10 @@ from esphome.helpers import get_bool_env
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
from .util.password import password_hash
 | 
					from .util.password import password_hash
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Sentinel file name used for CORE.config_path when dashboard initializes.
 | 
				
			||||||
 | 
					# This ensures .parent returns the config directory instead of root.
 | 
				
			||||||
 | 
					_DASHBOARD_SENTINEL_FILE = "___DASHBOARD_SENTINEL___.yaml"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class DashboardSettings:
 | 
					class DashboardSettings:
 | 
				
			||||||
    """Settings for the dashboard."""
 | 
					    """Settings for the dashboard."""
 | 
				
			||||||
@@ -48,7 +52,12 @@ class DashboardSettings:
 | 
				
			|||||||
        self.config_dir = Path(args.configuration)
 | 
					        self.config_dir = Path(args.configuration)
 | 
				
			||||||
        self.absolute_config_dir = self.config_dir.resolve()
 | 
					        self.absolute_config_dir = self.config_dir.resolve()
 | 
				
			||||||
        self.verbose = args.verbose
 | 
					        self.verbose = args.verbose
 | 
				
			||||||
        CORE.config_path = self.config_dir / "."
 | 
					        # Set to a sentinel file so .parent gives us the config directory.
 | 
				
			||||||
 | 
					        # Previously this was `os.path.join(self.config_dir, ".")` which worked because
 | 
				
			||||||
 | 
					        # os.path.dirname("/config/.") returns "/config", but Path("/config/.").parent
 | 
				
			||||||
 | 
					        # normalizes to Path("/config") first, then .parent returns Path("/"), breaking
 | 
				
			||||||
 | 
					        # secret resolution. Using a sentinel file ensures .parent gives the correct directory.
 | 
				
			||||||
 | 
					        CORE.config_path = self.config_dir / _DASHBOARD_SENTINEL_FILE
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @property
 | 
					    @property
 | 
				
			||||||
    def relative_url(self) -> str:
 | 
					    def relative_url(self) -> str:
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1058,7 +1058,8 @@ class DownloadBinaryRequestHandler(BaseHandler):
 | 
				
			|||||||
            "download",
 | 
					            "download",
 | 
				
			||||||
            f"{storage_json.name}-{file_name}",
 | 
					            f"{storage_json.name}-{file_name}",
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        path = storage_json.firmware_bin_path.with_name(file_name)
 | 
					
 | 
				
			||||||
 | 
					        path = storage_json.firmware_bin_path.parent.joinpath(file_name)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if not path.is_file():
 | 
					        if not path.is_file():
 | 
				
			||||||
            args = ["esphome", "idedata", settings.rel_path(configuration)]
 | 
					            args = ["esphome", "idedata", settings.rel_path(configuration)]
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -242,7 +242,7 @@ def send_check(
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def perform_ota(
 | 
					def perform_ota(
 | 
				
			||||||
    sock: socket.socket, password: str, file_handle: io.IOBase, filename: Path
 | 
					    sock: socket.socket, password: str | None, file_handle: io.IOBase, filename: Path
 | 
				
			||||||
) -> None:
 | 
					) -> None:
 | 
				
			||||||
    file_contents = file_handle.read()
 | 
					    file_contents = file_handle.read()
 | 
				
			||||||
    file_size = len(file_contents)
 | 
					    file_size = len(file_contents)
 | 
				
			||||||
@@ -278,13 +278,13 @@ def perform_ota(
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
    def perform_auth(
 | 
					    def perform_auth(
 | 
				
			||||||
        sock: socket.socket,
 | 
					        sock: socket.socket,
 | 
				
			||||||
        password: str,
 | 
					        password: str | None,
 | 
				
			||||||
        hash_func: Callable[..., Any],
 | 
					        hash_func: Callable[..., Any],
 | 
				
			||||||
        nonce_size: int,
 | 
					        nonce_size: int,
 | 
				
			||||||
        hash_name: str,
 | 
					        hash_name: str,
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        """Perform challenge-response authentication using specified hash algorithm."""
 | 
					        """Perform challenge-response authentication using specified hash algorithm."""
 | 
				
			||||||
        if not password:
 | 
					        if password is None:
 | 
				
			||||||
            raise OTAError("ESP requests password, but no password given!")
 | 
					            raise OTAError("ESP requests password, but no password given!")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        nonce_bytes = receive_exactly(
 | 
					        nonce_bytes = receive_exactly(
 | 
				
			||||||
@@ -385,7 +385,7 @@ def perform_ota(
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def run_ota_impl_(
 | 
					def run_ota_impl_(
 | 
				
			||||||
    remote_host: str | list[str], remote_port: int, password: str, filename: Path
 | 
					    remote_host: str | list[str], remote_port: int, password: str | None, filename: Path
 | 
				
			||||||
) -> tuple[int, str | None]:
 | 
					) -> tuple[int, str | None]:
 | 
				
			||||||
    from esphome.core import CORE
 | 
					    from esphome.core import CORE
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@@ -410,7 +410,7 @@ def run_ota_impl_(
 | 
				
			|||||||
        af, socktype, _, _, sa = r
 | 
					        af, socktype, _, _, sa = r
 | 
				
			||||||
        _LOGGER.info("Connecting to %s port %s...", sa[0], sa[1])
 | 
					        _LOGGER.info("Connecting to %s port %s...", sa[0], sa[1])
 | 
				
			||||||
        sock = socket.socket(af, socktype)
 | 
					        sock = socket.socket(af, socktype)
 | 
				
			||||||
        sock.settimeout(10.0)
 | 
					        sock.settimeout(20.0)
 | 
				
			||||||
        try:
 | 
					        try:
 | 
				
			||||||
            sock.connect(sa)
 | 
					            sock.connect(sa)
 | 
				
			||||||
        except OSError as err:
 | 
					        except OSError as err:
 | 
				
			||||||
@@ -436,7 +436,7 @@ def run_ota_impl_(
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def run_ota(
 | 
					def run_ota(
 | 
				
			||||||
    remote_host: str | list[str], remote_port: int, password: str, filename: Path
 | 
					    remote_host: str | list[str], remote_port: int, password: str | None, filename: Path
 | 
				
			||||||
) -> tuple[int, str | None]:
 | 
					) -> tuple[int, str | None]:
 | 
				
			||||||
    try:
 | 
					    try:
 | 
				
			||||||
        return run_ota_impl_(remote_host, remote_port, password, filename)
 | 
					        return run_ota_impl_(remote_host, remote_port, password, filename)
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -224,36 +224,37 @@ def resolve_ip_address(
 | 
				
			|||||||
        return res
 | 
					        return res
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # Process hosts
 | 
					    # Process hosts
 | 
				
			||||||
    cached_addresses: list[str] = []
 | 
					
 | 
				
			||||||
    uncached_hosts: list[str] = []
 | 
					    uncached_hosts: list[str] = []
 | 
				
			||||||
    has_cache = address_cache is not None
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    for h in hosts:
 | 
					    for h in hosts:
 | 
				
			||||||
        if is_ip_address(h):
 | 
					        if is_ip_address(h):
 | 
				
			||||||
            if has_cache:
 | 
					            _add_ip_addresses_to_addrinfo([h], port, res)
 | 
				
			||||||
                # If we have a cache, treat IPs as cached
 | 
					 | 
				
			||||||
                cached_addresses.append(h)
 | 
					 | 
				
			||||||
            else:
 | 
					 | 
				
			||||||
                # If no cache, pass IPs through to resolver with hostnames
 | 
					 | 
				
			||||||
                uncached_hosts.append(h)
 | 
					 | 
				
			||||||
        elif address_cache and (cached := address_cache.get_addresses(h)):
 | 
					        elif address_cache and (cached := address_cache.get_addresses(h)):
 | 
				
			||||||
            # Found in cache
 | 
					            _add_ip_addresses_to_addrinfo(cached, port, res)
 | 
				
			||||||
            cached_addresses.extend(cached)
 | 
					 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            # Not cached, need to resolve
 | 
					            # Not cached, need to resolve
 | 
				
			||||||
            if address_cache and address_cache.has_cache():
 | 
					            if address_cache and address_cache.has_cache():
 | 
				
			||||||
                _LOGGER.info("Host %s not in cache, will need to resolve", h)
 | 
					                _LOGGER.info("Host %s not in cache, will need to resolve", h)
 | 
				
			||||||
            uncached_hosts.append(h)
 | 
					            uncached_hosts.append(h)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # Process cached addresses (includes direct IPs and cached lookups)
 | 
					 | 
				
			||||||
    _add_ip_addresses_to_addrinfo(cached_addresses, port, res)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # If we have uncached hosts (only non-IP hostnames), resolve them
 | 
					    # If we have uncached hosts (only non-IP hostnames), resolve them
 | 
				
			||||||
    if uncached_hosts:
 | 
					    if uncached_hosts:
 | 
				
			||||||
 | 
					        from aioesphomeapi.host_resolver import AddrInfo as AioAddrInfo
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        from esphome.core import EsphomeError
 | 
				
			||||||
        from esphome.resolver import AsyncResolver
 | 
					        from esphome.resolver import AsyncResolver
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        resolver = AsyncResolver(uncached_hosts, port)
 | 
					        resolver = AsyncResolver(uncached_hosts, port)
 | 
				
			||||||
        addr_infos = resolver.resolve()
 | 
					        addr_infos: list[AioAddrInfo] = []
 | 
				
			||||||
 | 
					        try:
 | 
				
			||||||
 | 
					            addr_infos = resolver.resolve()
 | 
				
			||||||
 | 
					        except EsphomeError as err:
 | 
				
			||||||
 | 
					            if not res:
 | 
				
			||||||
 | 
					                # No pre-resolved addresses available, DNS resolution is fatal
 | 
				
			||||||
 | 
					                raise
 | 
				
			||||||
 | 
					            _LOGGER.info("%s (using %d already resolved IP addresses)", err, len(res))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # Convert aioesphomeapi AddrInfo to our format
 | 
					        # Convert aioesphomeapi AddrInfo to our format
 | 
				
			||||||
        for addr_info in addr_infos:
 | 
					        for addr_info in addr_infos:
 | 
				
			||||||
            sockaddr = addr_info.sockaddr
 | 
					            sockaddr = addr_info.sockaddr
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -118,11 +118,11 @@ class PinRegistry(dict):
 | 
				
			|||||||
                        parent_config = fconf.get_config_for_path(parent_path)
 | 
					                        parent_config = fconf.get_config_for_path(parent_path)
 | 
				
			||||||
                        final_val_fun(pin_config, parent_config)
 | 
					                        final_val_fun(pin_config, parent_config)
 | 
				
			||||||
                    allow_others = pin_config.get(CONF_ALLOW_OTHER_USES, False)
 | 
					                    allow_others = pin_config.get(CONF_ALLOW_OTHER_USES, False)
 | 
				
			||||||
                    if count != 1 and not allow_others:
 | 
					                    if count != 1 and not allow_others and not CORE.testing_mode:
 | 
				
			||||||
                        raise cv.Invalid(
 | 
					                        raise cv.Invalid(
 | 
				
			||||||
                            f"Pin {pin_config[CONF_NUMBER]} is used in multiple places"
 | 
					                            f"Pin {pin_config[CONF_NUMBER]} is used in multiple places"
 | 
				
			||||||
                        )
 | 
					                        )
 | 
				
			||||||
                    if count == 1 and allow_others:
 | 
					                    if count == 1 and allow_others and not CORE.testing_mode:
 | 
				
			||||||
                        raise cv.Invalid(
 | 
					                        raise cv.Invalid(
 | 
				
			||||||
                            f"Pin {pin_config[CONF_NUMBER]} incorrectly sets {CONF_ALLOW_OTHER_USES}: true"
 | 
					                            f"Pin {pin_config[CONF_NUMBER]} incorrectly sets {CONF_ALLOW_OTHER_USES}: true"
 | 
				
			||||||
                        )
 | 
					                        )
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -5,6 +5,7 @@ import os
 | 
				
			|||||||
from pathlib import Path
 | 
					from pathlib import Path
 | 
				
			||||||
import re
 | 
					import re
 | 
				
			||||||
import subprocess
 | 
					import subprocess
 | 
				
			||||||
 | 
					from typing import Any
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from esphome.const import CONF_COMPILE_PROCESS_LIMIT, CONF_ESPHOME, KEY_CORE
 | 
					from esphome.const import CONF_COMPILE_PROCESS_LIMIT, CONF_ESPHOME, KEY_CORE
 | 
				
			||||||
from esphome.core import CORE, EsphomeError
 | 
					from esphome.core import CORE, EsphomeError
 | 
				
			||||||
@@ -42,6 +43,35 @@ def patch_structhash():
 | 
				
			|||||||
    cli.clean_build_dir = patched_clean_build_dir
 | 
					    cli.clean_build_dir = patched_clean_build_dir
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def patch_file_downloader():
 | 
				
			||||||
 | 
					    """Patch PlatformIO's FileDownloader to retry on PackageException errors."""
 | 
				
			||||||
 | 
					    from platformio.package.download import FileDownloader
 | 
				
			||||||
 | 
					    from platformio.package.exception import PackageException
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    original_init = FileDownloader.__init__
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    def patched_init(self, *args: Any, **kwargs: Any) -> None:
 | 
				
			||||||
 | 
					        max_retries = 3
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        for attempt in range(max_retries):
 | 
				
			||||||
 | 
					            try:
 | 
				
			||||||
 | 
					                return original_init(self, *args, **kwargs)
 | 
				
			||||||
 | 
					            except PackageException as e:
 | 
				
			||||||
 | 
					                if attempt < max_retries - 1:
 | 
				
			||||||
 | 
					                    _LOGGER.warning(
 | 
				
			||||||
 | 
					                        "Package download failed: %s. Retrying... (attempt %d/%d)",
 | 
				
			||||||
 | 
					                        str(e),
 | 
				
			||||||
 | 
					                        attempt + 1,
 | 
				
			||||||
 | 
					                        max_retries,
 | 
				
			||||||
 | 
					                    )
 | 
				
			||||||
 | 
					                else:
 | 
				
			||||||
 | 
					                    # Final attempt - re-raise
 | 
				
			||||||
 | 
					                    raise
 | 
				
			||||||
 | 
					        return None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    FileDownloader.__init__ = patched_init
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
IGNORE_LIB_WARNINGS = f"(?:{'|'.join(['Hash', 'Update'])})"
 | 
					IGNORE_LIB_WARNINGS = f"(?:{'|'.join(['Hash', 'Update'])})"
 | 
				
			||||||
FILTER_PLATFORMIO_LINES = [
 | 
					FILTER_PLATFORMIO_LINES = [
 | 
				
			||||||
    r"Verbose mode can be enabled via `-v, --verbose` option.*",
 | 
					    r"Verbose mode can be enabled via `-v, --verbose` option.*",
 | 
				
			||||||
@@ -99,6 +129,7 @@ def run_platformio_cli(*args, **kwargs) -> str | int:
 | 
				
			|||||||
    import platformio.__main__
 | 
					    import platformio.__main__
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    patch_structhash()
 | 
					    patch_structhash()
 | 
				
			||||||
 | 
					    patch_file_downloader()
 | 
				
			||||||
    return run_external_command(platformio.__main__.main, *cmd, **kwargs)
 | 
					    return run_external_command(platformio.__main__.main, *cmd, **kwargs)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -15,6 +15,8 @@ from esphome.const import (
 | 
				
			|||||||
from esphome.core import CORE, EsphomeError
 | 
					from esphome.core import CORE, EsphomeError
 | 
				
			||||||
from esphome.helpers import (
 | 
					from esphome.helpers import (
 | 
				
			||||||
    copy_file_if_changed,
 | 
					    copy_file_if_changed,
 | 
				
			||||||
 | 
					    get_str_env,
 | 
				
			||||||
 | 
					    is_ha_addon,
 | 
				
			||||||
    read_file,
 | 
					    read_file,
 | 
				
			||||||
    walk_files,
 | 
					    walk_files,
 | 
				
			||||||
    write_file_if_changed,
 | 
					    write_file_if_changed,
 | 
				
			||||||
@@ -338,16 +340,21 @@ def clean_build():
 | 
				
			|||||||
def clean_all(configuration: list[str]):
 | 
					def clean_all(configuration: list[str]):
 | 
				
			||||||
    import shutil
 | 
					    import shutil
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # Clean entire build dir
 | 
					    data_dirs = [Path(dir) / ".esphome" for dir in configuration]
 | 
				
			||||||
    for dir in configuration:
 | 
					    if is_ha_addon():
 | 
				
			||||||
        build_dir = Path(dir) / ".esphome"
 | 
					        data_dirs.append(Path("/data"))
 | 
				
			||||||
        if build_dir.is_dir():
 | 
					    if "ESPHOME_DATA_DIR" in os.environ:
 | 
				
			||||||
            _LOGGER.info("Cleaning %s", build_dir)
 | 
					        data_dirs.append(Path(get_str_env("ESPHOME_DATA_DIR", None)))
 | 
				
			||||||
            # Don't remove storage as it will cause the dashboard to regenerate all configs
 | 
					
 | 
				
			||||||
            for item in build_dir.iterdir():
 | 
					    # Clean build dir
 | 
				
			||||||
                if item.is_file():
 | 
					    for dir in data_dirs:
 | 
				
			||||||
 | 
					        if dir.is_dir():
 | 
				
			||||||
 | 
					            _LOGGER.info("Cleaning %s", dir)
 | 
				
			||||||
 | 
					            # Don't remove storage or .json files which are needed by the dashboard
 | 
				
			||||||
 | 
					            for item in dir.iterdir():
 | 
				
			||||||
 | 
					                if item.is_file() and not item.name.endswith(".json"):
 | 
				
			||||||
                    item.unlink()
 | 
					                    item.unlink()
 | 
				
			||||||
                elif item.name != "storage" and item.is_dir():
 | 
					                elif item.is_dir() and item.name != "storage":
 | 
				
			||||||
                    shutil.rmtree(item)
 | 
					                    shutil.rmtree(item)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # Clean PlatformIO project files
 | 
					    # Clean PlatformIO project files
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,3 +1,4 @@
 | 
				
			|||||||
[build]
 | 
					[build]
 | 
				
			||||||
  command = "script/build-api-docs"
 | 
					  command = "script/build-api-docs"
 | 
				
			||||||
  publish = "api-docs"
 | 
					  publish = "api-docs"
 | 
				
			||||||
 | 
					  environment = { PYTHON_VERSION = "3.13" }
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -11,8 +11,8 @@ pyserial==3.5
 | 
				
			|||||||
platformio==6.1.18  # When updating platformio, also update /docker/Dockerfile
 | 
					platformio==6.1.18  # When updating platformio, also update /docker/Dockerfile
 | 
				
			||||||
esptool==5.1.0
 | 
					esptool==5.1.0
 | 
				
			||||||
click==8.1.7
 | 
					click==8.1.7
 | 
				
			||||||
esphome-dashboard==20250904.0
 | 
					esphome-dashboard==20251013.0
 | 
				
			||||||
aioesphomeapi==41.13.0
 | 
					aioesphomeapi==41.16.1
 | 
				
			||||||
zeroconf==0.148.0
 | 
					zeroconf==0.148.0
 | 
				
			||||||
puremagic==1.30
 | 
					puremagic==1.30
 | 
				
			||||||
ruamel.yaml==0.18.15 # dashboard_import
 | 
					ruamel.yaml==0.18.15 # dashboard_import
 | 
				
			||||||
 
 | 
				
			|||||||
							
								
								
									
										523
									
								
								script/analyze_component_buses.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										523
									
								
								script/analyze_component_buses.py
									
									
									
									
									
										Executable file
									
								
							@@ -0,0 +1,523 @@
 | 
				
			|||||||
 | 
					#!/usr/bin/env python3
 | 
				
			||||||
 | 
					"""Analyze component test files to detect which common bus configs they use.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					This script scans component test files and extracts which common bus configurations
 | 
				
			||||||
 | 
					(i2c, spi, uart, etc.) are included via the packages mechanism. This information
 | 
				
			||||||
 | 
					is used to group components that can be tested together.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					Components can only be grouped together if they use the EXACT SAME set of common
 | 
				
			||||||
 | 
					bus configurations, ensuring that merged configs are compatible.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					Example output:
 | 
				
			||||||
 | 
					{
 | 
				
			||||||
 | 
					    "component1": {
 | 
				
			||||||
 | 
					        "esp32-ard": ["i2c", "uart_19200"],
 | 
				
			||||||
 | 
					        "esp32-idf": ["i2c", "uart_19200"]
 | 
				
			||||||
 | 
					    },
 | 
				
			||||||
 | 
					    "component2": {
 | 
				
			||||||
 | 
					        "esp32-ard": ["spi"],
 | 
				
			||||||
 | 
					        "esp32-idf": ["spi"]
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					"""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					from __future__ import annotations
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import argparse
 | 
				
			||||||
 | 
					from functools import lru_cache
 | 
				
			||||||
 | 
					import json
 | 
				
			||||||
 | 
					from pathlib import Path
 | 
				
			||||||
 | 
					import re
 | 
				
			||||||
 | 
					import sys
 | 
				
			||||||
 | 
					from typing import Any
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Add esphome to path
 | 
				
			||||||
 | 
					sys.path.insert(0, str(Path(__file__).parent.parent))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					from esphome import yaml_util
 | 
				
			||||||
 | 
					from esphome.config_helpers import Extend, Remove
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Path to common bus configs
 | 
				
			||||||
 | 
					COMMON_BUS_PATH = Path("tests/test_build_components/common")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Package dependencies - maps packages to the packages they include
 | 
				
			||||||
 | 
					# When a component uses a package on the left, it automatically gets
 | 
				
			||||||
 | 
					# the packages on the right as well
 | 
				
			||||||
 | 
					PACKAGE_DEPENDENCIES = {
 | 
				
			||||||
 | 
					    "modbus": ["uart"],  # modbus packages include uart packages
 | 
				
			||||||
 | 
					    # Add more package dependencies here as needed
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Bus types that can be defined directly in config files
 | 
				
			||||||
 | 
					# Components defining these directly cannot be grouped (they create unique bus IDs)
 | 
				
			||||||
 | 
					DIRECT_BUS_TYPES = ("i2c", "spi", "uart", "modbus")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Signature for components with no bus requirements
 | 
				
			||||||
 | 
					# These components can be merged with any other group
 | 
				
			||||||
 | 
					NO_BUSES_SIGNATURE = "no_buses"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Base bus components - these ARE the bus implementations and should not
 | 
				
			||||||
 | 
					# be flagged as needing migration since they are the platform/base components
 | 
				
			||||||
 | 
					BASE_BUS_COMPONENTS = {
 | 
				
			||||||
 | 
					    "i2c",
 | 
				
			||||||
 | 
					    "spi",
 | 
				
			||||||
 | 
					    "uart",
 | 
				
			||||||
 | 
					    "modbus",
 | 
				
			||||||
 | 
					    "canbus",
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Components that must be tested in isolation (not grouped or batched with others)
 | 
				
			||||||
 | 
					# These have known build issues that prevent grouping
 | 
				
			||||||
 | 
					# NOTE: This should be kept in sync with both test_build_components and split_components_for_ci.py
 | 
				
			||||||
 | 
					ISOLATED_COMPONENTS = {
 | 
				
			||||||
 | 
					    "animation": "Has display lambda in common.yaml that requires existing display platform - breaks when merged without display",
 | 
				
			||||||
 | 
					    "esphome": "Defines devices/areas in esphome: section that are referenced in other sections - breaks when merged",
 | 
				
			||||||
 | 
					    "ethernet": "Defines ethernet: which conflicts with wifi: used by most components",
 | 
				
			||||||
 | 
					    "ethernet_info": "Related to ethernet component which conflicts with wifi",
 | 
				
			||||||
 | 
					    "lvgl": "Defines multiple SDL displays on host platform that conflict when merged with other display configs",
 | 
				
			||||||
 | 
					    "matrix_keypad": "Needs isolation due to keypad",
 | 
				
			||||||
 | 
					    "mcp4725": "no YAML config to specify i2c bus id",
 | 
				
			||||||
 | 
					    "mcp47a1": "no YAML config to specify i2c bus id",
 | 
				
			||||||
 | 
					    "modbus_controller": "Defines multiple modbus buses for testing client/server functionality - conflicts with package modbus bus",
 | 
				
			||||||
 | 
					    "neopixelbus": "RMT type conflict with ESP32 Arduino/ESP-IDF headers (enum vs struct rmt_channel_t)",
 | 
				
			||||||
 | 
					    "packages": "cannot merge packages",
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					@lru_cache(maxsize=1)
 | 
				
			||||||
 | 
					def get_common_bus_packages() -> frozenset[str]:
 | 
				
			||||||
 | 
					    """Get the list of common bus package names.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Reads from tests/test_build_components/common/ directory
 | 
				
			||||||
 | 
					    and caches the result. All bus types support component grouping
 | 
				
			||||||
 | 
					    for config validation since --testing-mode bypasses runtime conflicts.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Frozenset of common bus package names (i2c, spi, uart, etc.)
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    if not COMMON_BUS_PATH.exists():
 | 
				
			||||||
 | 
					        return frozenset()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # List all directories in common/ - these are the bus package names
 | 
				
			||||||
 | 
					    return frozenset(d.name for d in COMMON_BUS_PATH.iterdir() if d.is_dir())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def uses_local_file_references(component_dir: Path) -> bool:
 | 
				
			||||||
 | 
					    """Check if a component uses local file references via $component_dir.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Components that reference local files cannot be grouped because each needs
 | 
				
			||||||
 | 
					    a unique component_dir path pointing to their specific directory.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        component_dir: Path to the component's test directory
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        True if the component uses $component_dir for local file references
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    common_yaml = component_dir / "common.yaml"
 | 
				
			||||||
 | 
					    if not common_yaml.exists():
 | 
				
			||||||
 | 
					        return False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    try:
 | 
				
			||||||
 | 
					        content = common_yaml.read_text()
 | 
				
			||||||
 | 
					    except Exception:  # pylint: disable=broad-exception-caught
 | 
				
			||||||
 | 
					        return False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Pattern to match $component_dir or ${component_dir} references
 | 
				
			||||||
 | 
					    # These indicate local file usage that prevents grouping
 | 
				
			||||||
 | 
					    return bool(re.search(r"\$\{?component_dir\}?", content))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def is_platform_component(component_dir: Path) -> bool:
 | 
				
			||||||
 | 
					    """Check if a component is a platform component (abstract base class).
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Platform components have IS_PLATFORM_COMPONENT = True and cannot be
 | 
				
			||||||
 | 
					    instantiated without a platform-specific implementation. These components
 | 
				
			||||||
 | 
					    define abstract methods and cause linker errors if compiled standalone.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Examples: canbus, mcp23x08_base, mcp23x17_base
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        component_dir: Path to the component's test directory
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        True if this is a platform component
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    # Check in the actual component source, not tests
 | 
				
			||||||
 | 
					    # tests/components/X -> tests/components -> tests -> repo root
 | 
				
			||||||
 | 
					    repo_root = component_dir.parent.parent.parent
 | 
				
			||||||
 | 
					    comp_init = (
 | 
				
			||||||
 | 
					        repo_root / "esphome" / "components" / component_dir.name / "__init__.py"
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if not comp_init.exists():
 | 
				
			||||||
 | 
					        return False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    try:
 | 
				
			||||||
 | 
					        content = comp_init.read_text()
 | 
				
			||||||
 | 
					        return "IS_PLATFORM_COMPONENT = True" in content
 | 
				
			||||||
 | 
					    except Exception:  # pylint: disable=broad-exception-caught
 | 
				
			||||||
 | 
					        return False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def _contains_extend_or_remove(data: Any) -> bool:
 | 
				
			||||||
 | 
					    """Recursively check if data contains Extend or Remove objects.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        data: Parsed YAML data structure
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        True if any Extend or Remove objects are found
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    if isinstance(data, (Extend, Remove)):
 | 
				
			||||||
 | 
					        return True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if isinstance(data, dict):
 | 
				
			||||||
 | 
					        for value in data.values():
 | 
				
			||||||
 | 
					            if _contains_extend_or_remove(value):
 | 
				
			||||||
 | 
					                return True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if isinstance(data, list):
 | 
				
			||||||
 | 
					        for item in data:
 | 
				
			||||||
 | 
					            if _contains_extend_or_remove(item):
 | 
				
			||||||
 | 
					                return True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def analyze_yaml_file(yaml_file: Path) -> dict[str, Any]:
 | 
				
			||||||
 | 
					    """Load a YAML file once and extract all needed information.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    This loads the YAML file a single time and extracts all information needed
 | 
				
			||||||
 | 
					    for component analysis, avoiding multiple file reads.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        yaml_file: Path to the YAML file to analyze
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Dictionary with keys:
 | 
				
			||||||
 | 
					        - buses: set of common bus package names
 | 
				
			||||||
 | 
					        - has_extend_remove: bool indicating if Extend/Remove objects are present
 | 
				
			||||||
 | 
					        - has_direct_bus_config: bool indicating if buses are defined directly (not via packages)
 | 
				
			||||||
 | 
					        - loaded: bool indicating if file was successfully loaded
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    result = {
 | 
				
			||||||
 | 
					        "buses": set(),
 | 
				
			||||||
 | 
					        "has_extend_remove": False,
 | 
				
			||||||
 | 
					        "has_direct_bus_config": False,
 | 
				
			||||||
 | 
					        "loaded": False,
 | 
				
			||||||
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if not yaml_file.exists():
 | 
				
			||||||
 | 
					        return result
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    try:
 | 
				
			||||||
 | 
					        data = yaml_util.load_yaml(yaml_file)
 | 
				
			||||||
 | 
					        result["loaded"] = True
 | 
				
			||||||
 | 
					    except Exception:  # pylint: disable=broad-exception-caught
 | 
				
			||||||
 | 
					        return result
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Check for Extend/Remove objects
 | 
				
			||||||
 | 
					    result["has_extend_remove"] = _contains_extend_or_remove(data)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Check if buses are defined directly (not via packages)
 | 
				
			||||||
 | 
					    # Components that define i2c, spi, uart, or modbus directly in test files
 | 
				
			||||||
 | 
					    # cannot be grouped because they create unique bus IDs
 | 
				
			||||||
 | 
					    if isinstance(data, dict):
 | 
				
			||||||
 | 
					        for bus_type in DIRECT_BUS_TYPES:
 | 
				
			||||||
 | 
					            if bus_type in data:
 | 
				
			||||||
 | 
					                result["has_direct_bus_config"] = True
 | 
				
			||||||
 | 
					                break
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Extract common bus packages
 | 
				
			||||||
 | 
					    if not isinstance(data, dict) or "packages" not in data:
 | 
				
			||||||
 | 
					        return result
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    packages = data["packages"]
 | 
				
			||||||
 | 
					    if not isinstance(packages, dict):
 | 
				
			||||||
 | 
					        return result
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    valid_buses = get_common_bus_packages()
 | 
				
			||||||
 | 
					    for pkg_name in packages:
 | 
				
			||||||
 | 
					        if pkg_name not in valid_buses:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					        result["buses"].add(pkg_name)
 | 
				
			||||||
 | 
					        # Add any package dependencies (e.g., modbus includes uart)
 | 
				
			||||||
 | 
					        if pkg_name not in PACKAGE_DEPENDENCIES:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					        for dep in PACKAGE_DEPENDENCIES[pkg_name]:
 | 
				
			||||||
 | 
					            if dep not in valid_buses:
 | 
				
			||||||
 | 
					                continue
 | 
				
			||||||
 | 
					            result["buses"].add(dep)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return result
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def analyze_component(component_dir: Path) -> tuple[dict[str, list[str]], bool, bool]:
 | 
				
			||||||
 | 
					    """Analyze a component directory to find which buses each platform uses.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        component_dir: Path to the component's test directory
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Tuple of:
 | 
				
			||||||
 | 
					        - Dictionary mapping platform to list of bus configs
 | 
				
			||||||
 | 
					          Example: {"esp32-ard": ["i2c", "spi"], "esp32-idf": ["i2c"]}
 | 
				
			||||||
 | 
					        - Boolean indicating if component uses !extend or !remove
 | 
				
			||||||
 | 
					        - Boolean indicating if component defines buses directly (not via packages)
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    if not component_dir.is_dir():
 | 
				
			||||||
 | 
					        return {}, False, False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    platform_buses = {}
 | 
				
			||||||
 | 
					    has_extend_remove = False
 | 
				
			||||||
 | 
					    has_direct_bus_config = False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Analyze all YAML files in the component directory
 | 
				
			||||||
 | 
					    for yaml_file in component_dir.glob("*.yaml"):
 | 
				
			||||||
 | 
					        analysis = analyze_yaml_file(yaml_file)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Track if any file uses extend/remove
 | 
				
			||||||
 | 
					        if analysis["has_extend_remove"]:
 | 
				
			||||||
 | 
					            has_extend_remove = True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Track if any file defines buses directly
 | 
				
			||||||
 | 
					        if analysis["has_direct_bus_config"]:
 | 
				
			||||||
 | 
					            has_direct_bus_config = True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # For test.*.yaml files, extract platform and buses
 | 
				
			||||||
 | 
					        if yaml_file.name.startswith("test.") and yaml_file.suffix == ".yaml":
 | 
				
			||||||
 | 
					            # Extract platform name (e.g., test.esp32-ard.yaml -> esp32-ard)
 | 
				
			||||||
 | 
					            platform = yaml_file.stem.replace("test.", "")
 | 
				
			||||||
 | 
					            # Always add platform, even if it has no buses (empty list)
 | 
				
			||||||
 | 
					            # This allows grouping components that don't use any shared buses
 | 
				
			||||||
 | 
					            platform_buses[platform] = (
 | 
				
			||||||
 | 
					                sorted(analysis["buses"]) if analysis["buses"] else []
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return platform_buses, has_extend_remove, has_direct_bus_config
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def analyze_all_components(
 | 
				
			||||||
 | 
					    tests_dir: Path = None,
 | 
				
			||||||
 | 
					) -> tuple[dict[str, dict[str, list[str]]], set[str], set[str]]:
 | 
				
			||||||
 | 
					    """Analyze all component test directories.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        tests_dir: Path to tests/components directory (defaults to auto-detect)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Tuple of:
 | 
				
			||||||
 | 
					        - Dictionary mapping component name to platform->buses mapping
 | 
				
			||||||
 | 
					        - Set of component names that cannot be grouped
 | 
				
			||||||
 | 
					        - Set of component names that define buses directly (need migration warning)
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    if tests_dir is None:
 | 
				
			||||||
 | 
					        tests_dir = Path("tests/components")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if not tests_dir.exists():
 | 
				
			||||||
 | 
					        print(f"Error: {tests_dir} does not exist", file=sys.stderr)
 | 
				
			||||||
 | 
					        return {}, set(), set()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    components = {}
 | 
				
			||||||
 | 
					    non_groupable = set()
 | 
				
			||||||
 | 
					    direct_bus_components = set()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    for component_dir in sorted(tests_dir.iterdir()):
 | 
				
			||||||
 | 
					        if not component_dir.is_dir():
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        component_name = component_dir.name
 | 
				
			||||||
 | 
					        platform_buses, has_extend_remove, has_direct_bus_config = analyze_component(
 | 
				
			||||||
 | 
					            component_dir
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if platform_buses:
 | 
				
			||||||
 | 
					            components[component_name] = platform_buses
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Note: Components using $component_dir are now groupable because the merge
 | 
				
			||||||
 | 
					        # script rewrites these to absolute paths with component-specific substitutions
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Check if component is explicitly isolated
 | 
				
			||||||
 | 
					        # These have known issues that prevent grouping with other components
 | 
				
			||||||
 | 
					        if component_name in ISOLATED_COMPONENTS:
 | 
				
			||||||
 | 
					            non_groupable.add(component_name)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Check if component is a base bus component
 | 
				
			||||||
 | 
					        # These ARE the bus platform implementations and define buses directly for testing
 | 
				
			||||||
 | 
					        # They cannot be grouped with components that use bus packages (causes ID conflicts)
 | 
				
			||||||
 | 
					        if component_name in BASE_BUS_COMPONENTS:
 | 
				
			||||||
 | 
					            non_groupable.add(component_name)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Check if component uses !extend or !remove directives
 | 
				
			||||||
 | 
					        # These rely on specific config structure and cannot be merged with other components
 | 
				
			||||||
 | 
					        # The directives work within a component's own package hierarchy but break when
 | 
				
			||||||
 | 
					        # merging independent components together
 | 
				
			||||||
 | 
					        if has_extend_remove:
 | 
				
			||||||
 | 
					            non_groupable.add(component_name)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Check if component defines buses directly in test files
 | 
				
			||||||
 | 
					        # These create unique bus IDs and cause conflicts when merged
 | 
				
			||||||
 | 
					        # Exclude base bus components (i2c, spi, uart, etc.) since they ARE the platform
 | 
				
			||||||
 | 
					        if has_direct_bus_config and component_name not in BASE_BUS_COMPONENTS:
 | 
				
			||||||
 | 
					            non_groupable.add(component_name)
 | 
				
			||||||
 | 
					            direct_bus_components.add(component_name)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return components, non_groupable, direct_bus_components
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def create_grouping_signature(
 | 
				
			||||||
 | 
					    platform_buses: dict[str, list[str]], platform: str
 | 
				
			||||||
 | 
					) -> str:
 | 
				
			||||||
 | 
					    """Create a signature string for grouping components.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Components with the same signature can be grouped together for testing.
 | 
				
			||||||
 | 
					    All valid bus types can be grouped since --testing-mode bypasses runtime
 | 
				
			||||||
 | 
					    conflicts during config validation.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        platform_buses: Mapping of platform to list of buses
 | 
				
			||||||
 | 
					        platform: The specific platform to create signature for
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Signature string (e.g., "i2c" or "uart") or empty if no valid buses
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    buses = platform_buses.get(platform, [])
 | 
				
			||||||
 | 
					    if not buses:
 | 
				
			||||||
 | 
					        return ""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Only include valid bus types in signature
 | 
				
			||||||
 | 
					    common_buses = get_common_bus_packages()
 | 
				
			||||||
 | 
					    valid_buses = [b for b in buses if b in common_buses]
 | 
				
			||||||
 | 
					    if not valid_buses:
 | 
				
			||||||
 | 
					        return ""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return "+".join(sorted(valid_buses))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def group_components_by_signature(
 | 
				
			||||||
 | 
					    components: dict[str, dict[str, list[str]]], platform: str
 | 
				
			||||||
 | 
					) -> dict[str, list[str]]:
 | 
				
			||||||
 | 
					    """Group components by their bus signature for a specific platform.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        components: Component analysis results from analyze_all_components()
 | 
				
			||||||
 | 
					        platform: Platform to group for (e.g., "esp32-ard")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Dictionary mapping signature to list of component names
 | 
				
			||||||
 | 
					        Example: {"i2c+uart_19200": ["comp1", "comp2"], "spi": ["comp3"]}
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    signature_groups: dict[str, list[str]] = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    for component_name, platform_buses in components.items():
 | 
				
			||||||
 | 
					        if platform not in platform_buses:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        signature = create_grouping_signature(platform_buses, platform)
 | 
				
			||||||
 | 
					        if not signature:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if signature not in signature_groups:
 | 
				
			||||||
 | 
					            signature_groups[signature] = []
 | 
				
			||||||
 | 
					        signature_groups[signature].append(component_name)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return signature_groups
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def main() -> None:
 | 
				
			||||||
 | 
					    """Main entry point."""
 | 
				
			||||||
 | 
					    parser = argparse.ArgumentParser(
 | 
				
			||||||
 | 
					        description="Analyze component test files to detect common bus usage"
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--components",
 | 
				
			||||||
 | 
					        "-c",
 | 
				
			||||||
 | 
					        nargs="+",
 | 
				
			||||||
 | 
					        help="Specific components to analyze (default: all)",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--platform",
 | 
				
			||||||
 | 
					        "-p",
 | 
				
			||||||
 | 
					        help="Show grouping for a specific platform",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--json",
 | 
				
			||||||
 | 
					        action="store_true",
 | 
				
			||||||
 | 
					        help="Output as JSON",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--group",
 | 
				
			||||||
 | 
					        action="store_true",
 | 
				
			||||||
 | 
					        help="Show component groupings by bus signature",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    args = parser.parse_args()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Analyze components
 | 
				
			||||||
 | 
					    tests_dir = Path("tests/components")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if args.components:
 | 
				
			||||||
 | 
					        # Analyze only specified components
 | 
				
			||||||
 | 
					        components = {}
 | 
				
			||||||
 | 
					        non_groupable = set()
 | 
				
			||||||
 | 
					        direct_bus_components = set()
 | 
				
			||||||
 | 
					        for comp in args.components:
 | 
				
			||||||
 | 
					            comp_dir = tests_dir / comp
 | 
				
			||||||
 | 
					            platform_buses, has_extend_remove, has_direct_bus_config = (
 | 
				
			||||||
 | 
					                analyze_component(comp_dir)
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					            if platform_buses:
 | 
				
			||||||
 | 
					                components[comp] = platform_buses
 | 
				
			||||||
 | 
					            # Note: Components using $component_dir are now groupable
 | 
				
			||||||
 | 
					            if comp in ISOLATED_COMPONENTS:
 | 
				
			||||||
 | 
					                non_groupable.add(comp)
 | 
				
			||||||
 | 
					            if comp in BASE_BUS_COMPONENTS:
 | 
				
			||||||
 | 
					                non_groupable.add(comp)
 | 
				
			||||||
 | 
					            if has_direct_bus_config and comp not in BASE_BUS_COMPONENTS:
 | 
				
			||||||
 | 
					                non_groupable.add(comp)
 | 
				
			||||||
 | 
					                direct_bus_components.add(comp)
 | 
				
			||||||
 | 
					    else:
 | 
				
			||||||
 | 
					        # Analyze all components
 | 
				
			||||||
 | 
					        components, non_groupable, direct_bus_components = analyze_all_components(
 | 
				
			||||||
 | 
					            tests_dir
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Output results
 | 
				
			||||||
 | 
					    if args.group and args.platform:
 | 
				
			||||||
 | 
					        # Show groupings for a specific platform
 | 
				
			||||||
 | 
					        groups = group_components_by_signature(components, args.platform)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if args.json:
 | 
				
			||||||
 | 
					            print(json.dumps(groups, indent=2))
 | 
				
			||||||
 | 
					        else:
 | 
				
			||||||
 | 
					            print(f"Component groupings for {args.platform}:")
 | 
				
			||||||
 | 
					            print()
 | 
				
			||||||
 | 
					            for signature, comp_list in sorted(groups.items()):
 | 
				
			||||||
 | 
					                print(f"  {signature}:")
 | 
				
			||||||
 | 
					                for comp in sorted(comp_list):
 | 
				
			||||||
 | 
					                    print(f"    - {comp}")
 | 
				
			||||||
 | 
					                print()
 | 
				
			||||||
 | 
					    elif args.json:
 | 
				
			||||||
 | 
					        # JSON output
 | 
				
			||||||
 | 
					        print(json.dumps(components, indent=2))
 | 
				
			||||||
 | 
					    else:
 | 
				
			||||||
 | 
					        # Human-readable output
 | 
				
			||||||
 | 
					        for component, platform_buses in sorted(components.items()):
 | 
				
			||||||
 | 
					            non_groupable_marker = (
 | 
				
			||||||
 | 
					                " [NON-GROUPABLE]" if component in non_groupable else ""
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					            print(f"{component}{non_groupable_marker}:")
 | 
				
			||||||
 | 
					            for platform, buses in sorted(platform_buses.items()):
 | 
				
			||||||
 | 
					                bus_str = ", ".join(buses)
 | 
				
			||||||
 | 
					                print(f"  {platform}: {bus_str}")
 | 
				
			||||||
 | 
					        print()
 | 
				
			||||||
 | 
					        print(f"Total components analyzed: {len(components)}")
 | 
				
			||||||
 | 
					        if non_groupable:
 | 
				
			||||||
 | 
					            print(f"Non-groupable components (use local files): {len(non_groupable)}")
 | 
				
			||||||
 | 
					            for comp in sorted(non_groupable):
 | 
				
			||||||
 | 
					                print(f"  - {comp}")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					if __name__ == "__main__":
 | 
				
			||||||
 | 
					    main()
 | 
				
			||||||
@@ -237,6 +237,16 @@ def main() -> None:
 | 
				
			|||||||
    result = subprocess.run(cmd, capture_output=True, text=True, check=True)
 | 
					    result = subprocess.run(cmd, capture_output=True, text=True, check=True)
 | 
				
			||||||
    changed_components = parse_list_components_output(result.stdout)
 | 
					    changed_components = parse_list_components_output(result.stdout)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Filter to only components that have test files
 | 
				
			||||||
 | 
					    # Components without tests shouldn't generate CI test jobs
 | 
				
			||||||
 | 
					    tests_dir = Path(root_path) / "tests" / "components"
 | 
				
			||||||
 | 
					    changed_components_with_tests = [
 | 
				
			||||||
 | 
					        component
 | 
				
			||||||
 | 
					        for component in changed_components
 | 
				
			||||||
 | 
					        if (component_test_dir := tests_dir / component).exists()
 | 
				
			||||||
 | 
					        and any(component_test_dir.glob("test.*.yaml"))
 | 
				
			||||||
 | 
					    ]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # Build output
 | 
					    # Build output
 | 
				
			||||||
    output: dict[str, Any] = {
 | 
					    output: dict[str, Any] = {
 | 
				
			||||||
        "integration_tests": run_integration,
 | 
					        "integration_tests": run_integration,
 | 
				
			||||||
@@ -244,7 +254,8 @@ def main() -> None:
 | 
				
			|||||||
        "clang_format": run_clang_format,
 | 
					        "clang_format": run_clang_format,
 | 
				
			||||||
        "python_linters": run_python_linters,
 | 
					        "python_linters": run_python_linters,
 | 
				
			||||||
        "changed_components": changed_components,
 | 
					        "changed_components": changed_components,
 | 
				
			||||||
        "component_test_count": len(changed_components),
 | 
					        "changed_components_with_tests": changed_components_with_tests,
 | 
				
			||||||
 | 
					        "component_test_count": len(changed_components_with_tests),
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # Output as JSON
 | 
					    # Output as JSON
 | 
				
			||||||
 
 | 
				
			|||||||
							
								
								
									
										379
									
								
								script/merge_component_configs.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										379
									
								
								script/merge_component_configs.py
									
									
									
									
									
										Executable file
									
								
							@@ -0,0 +1,379 @@
 | 
				
			|||||||
 | 
					#!/usr/bin/env python3
 | 
				
			||||||
 | 
					"""Merge multiple component test configurations into a single test file.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					This script combines multiple component test files that use the same common bus
 | 
				
			||||||
 | 
					configurations into a single merged test file. This allows testing multiple
 | 
				
			||||||
 | 
					compatible components together, reducing CI build time.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					The merger handles:
 | 
				
			||||||
 | 
					- Component-specific substitutions (prefixing to avoid conflicts)
 | 
				
			||||||
 | 
					- Multiple instances of component configurations
 | 
				
			||||||
 | 
					- Shared common bus packages (included only once)
 | 
				
			||||||
 | 
					- Platform-specific configurations
 | 
				
			||||||
 | 
					- Uses ESPHome's built-in merge_config for proper YAML merging
 | 
				
			||||||
 | 
					"""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					from __future__ import annotations
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import argparse
 | 
				
			||||||
 | 
					from pathlib import Path
 | 
				
			||||||
 | 
					import re
 | 
				
			||||||
 | 
					import sys
 | 
				
			||||||
 | 
					from typing import Any
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Add esphome to path so we can import from it
 | 
				
			||||||
 | 
					sys.path.insert(0, str(Path(__file__).parent.parent))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					from esphome import yaml_util
 | 
				
			||||||
 | 
					from esphome.config_helpers import merge_config
 | 
				
			||||||
 | 
					from script.analyze_component_buses import PACKAGE_DEPENDENCIES, get_common_bus_packages
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def load_yaml_file(yaml_file: Path) -> dict:
 | 
				
			||||||
 | 
					    """Load YAML file using ESPHome's YAML loader.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        yaml_file: Path to the YAML file
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Parsed YAML as dictionary
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    if not yaml_file.exists():
 | 
				
			||||||
 | 
					        raise FileNotFoundError(f"YAML file not found: {yaml_file}")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return yaml_util.load_yaml(yaml_file)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def extract_packages_from_yaml(data: dict) -> dict[str, str]:
 | 
				
			||||||
 | 
					    """Extract COMMON BUS package includes from parsed YAML.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Only extracts packages that are from test_build_components/common/,
 | 
				
			||||||
 | 
					    ignoring component-specific packages.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        data: Parsed YAML dictionary
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Dictionary mapping package name to include path (as string representation)
 | 
				
			||||||
 | 
					        Only includes common bus packages (i2c, spi, uart, etc.)
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    if "packages" not in data:
 | 
				
			||||||
 | 
					        return {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    packages_value = data["packages"]
 | 
				
			||||||
 | 
					    if not isinstance(packages_value, dict):
 | 
				
			||||||
 | 
					        # List format doesn't include common bus packages (those use dict format)
 | 
				
			||||||
 | 
					        return {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Get common bus package names (cached)
 | 
				
			||||||
 | 
					    common_bus_packages = get_common_bus_packages()
 | 
				
			||||||
 | 
					    packages = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Dictionary format: packages: {name: value}
 | 
				
			||||||
 | 
					    for name, value in packages_value.items():
 | 
				
			||||||
 | 
					        # Only include common bus packages, ignore component-specific ones
 | 
				
			||||||
 | 
					        if name not in common_bus_packages:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					        packages[name] = str(value)
 | 
				
			||||||
 | 
					        # Also track package dependencies (e.g., modbus includes uart)
 | 
				
			||||||
 | 
					        if name not in PACKAGE_DEPENDENCIES:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					        for dep in PACKAGE_DEPENDENCIES[name]:
 | 
				
			||||||
 | 
					            if dep not in common_bus_packages:
 | 
				
			||||||
 | 
					                continue
 | 
				
			||||||
 | 
					            # Mark as included via dependency
 | 
				
			||||||
 | 
					            packages[f"_dep_{dep}"] = f"(included via {name})"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return packages
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def prefix_substitutions_in_dict(
 | 
				
			||||||
 | 
					    data: Any, prefix: str, exclude: set[str] | None = None
 | 
				
			||||||
 | 
					) -> Any:
 | 
				
			||||||
 | 
					    """Recursively prefix all substitution references in a data structure.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        data: YAML data structure (dict, list, or scalar)
 | 
				
			||||||
 | 
					        prefix: Prefix to add to substitution names
 | 
				
			||||||
 | 
					        exclude: Set of substitution names to exclude from prefixing
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Data structure with prefixed substitution references
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    if exclude is None:
 | 
				
			||||||
 | 
					        exclude = set()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    def replace_sub(text: str) -> str:
 | 
				
			||||||
 | 
					        """Replace substitution references in a string."""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        def replace_match(match):
 | 
				
			||||||
 | 
					            sub_name = match.group(1)
 | 
				
			||||||
 | 
					            if sub_name in exclude:
 | 
				
			||||||
 | 
					                return match.group(0)
 | 
				
			||||||
 | 
					            # Always use braced format in output for consistency
 | 
				
			||||||
 | 
					            return f"${{{prefix}_{sub_name}}}"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Match both ${substitution} and $substitution formats
 | 
				
			||||||
 | 
					        return re.sub(r"\$\{?(\w+)\}?", replace_match, text)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if isinstance(data, dict):
 | 
				
			||||||
 | 
					        result = {}
 | 
				
			||||||
 | 
					        for key, value in data.items():
 | 
				
			||||||
 | 
					            result[key] = prefix_substitutions_in_dict(value, prefix, exclude)
 | 
				
			||||||
 | 
					        return result
 | 
				
			||||||
 | 
					    if isinstance(data, list):
 | 
				
			||||||
 | 
					        return [prefix_substitutions_in_dict(item, prefix, exclude) for item in data]
 | 
				
			||||||
 | 
					    if isinstance(data, str):
 | 
				
			||||||
 | 
					        return replace_sub(data)
 | 
				
			||||||
 | 
					    return data
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def deduplicate_by_id(data: dict) -> dict:
 | 
				
			||||||
 | 
					    """Deduplicate list items with the same ID.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Keeps only the first occurrence of each ID. If items with the same ID
 | 
				
			||||||
 | 
					    are identical, this silently deduplicates. If they differ, the first
 | 
				
			||||||
 | 
					    one is kept (ESPHome's validation will catch if this causes issues).
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        data: Parsed config dictionary
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Config with deduplicated lists
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    if not isinstance(data, dict):
 | 
				
			||||||
 | 
					        return data
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    result = {}
 | 
				
			||||||
 | 
					    for key, value in data.items():
 | 
				
			||||||
 | 
					        if isinstance(value, list):
 | 
				
			||||||
 | 
					            # Check for items with 'id' field
 | 
				
			||||||
 | 
					            seen_ids = set()
 | 
				
			||||||
 | 
					            deduped_list = []
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            for item in value:
 | 
				
			||||||
 | 
					                if isinstance(item, dict) and "id" in item:
 | 
				
			||||||
 | 
					                    item_id = item["id"]
 | 
				
			||||||
 | 
					                    if item_id not in seen_ids:
 | 
				
			||||||
 | 
					                        seen_ids.add(item_id)
 | 
				
			||||||
 | 
					                        deduped_list.append(item)
 | 
				
			||||||
 | 
					                    # else: skip duplicate ID (keep first occurrence)
 | 
				
			||||||
 | 
					                else:
 | 
				
			||||||
 | 
					                    # No ID, just add it
 | 
				
			||||||
 | 
					                    deduped_list.append(item)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            result[key] = deduped_list
 | 
				
			||||||
 | 
					        elif isinstance(value, dict):
 | 
				
			||||||
 | 
					            # Recursively deduplicate nested dicts
 | 
				
			||||||
 | 
					            result[key] = deduplicate_by_id(value)
 | 
				
			||||||
 | 
					        else:
 | 
				
			||||||
 | 
					            result[key] = value
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return result
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def merge_component_configs(
 | 
				
			||||||
 | 
					    component_names: list[str],
 | 
				
			||||||
 | 
					    platform: str,
 | 
				
			||||||
 | 
					    tests_dir: Path,
 | 
				
			||||||
 | 
					    output_file: Path,
 | 
				
			||||||
 | 
					) -> None:
 | 
				
			||||||
 | 
					    """Merge multiple component test configs into a single file.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        component_names: List of component names to merge
 | 
				
			||||||
 | 
					        platform: Platform to merge for (e.g., "esp32-ard")
 | 
				
			||||||
 | 
					        tests_dir: Path to tests/components directory
 | 
				
			||||||
 | 
					        output_file: Path to output merged config file
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    if not component_names:
 | 
				
			||||||
 | 
					        raise ValueError("No components specified")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Track packages to ensure they're identical
 | 
				
			||||||
 | 
					    all_packages = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Start with empty config
 | 
				
			||||||
 | 
					    merged_config_data = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Process each component
 | 
				
			||||||
 | 
					    for comp_name in component_names:
 | 
				
			||||||
 | 
					        comp_dir = tests_dir / comp_name
 | 
				
			||||||
 | 
					        test_file = comp_dir / f"test.{platform}.yaml"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if not test_file.exists():
 | 
				
			||||||
 | 
					            raise FileNotFoundError(f"Test file not found: {test_file}")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Load the component's test file
 | 
				
			||||||
 | 
					        comp_data = load_yaml_file(test_file)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Validate packages are compatible
 | 
				
			||||||
 | 
					        # Components with no packages (no_buses) can merge with any group
 | 
				
			||||||
 | 
					        comp_packages = extract_packages_from_yaml(comp_data)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if all_packages is None:
 | 
				
			||||||
 | 
					            # First component - set the baseline
 | 
				
			||||||
 | 
					            all_packages = comp_packages
 | 
				
			||||||
 | 
					        elif not comp_packages:
 | 
				
			||||||
 | 
					            # This component has no packages (no_buses) - it can merge with any group
 | 
				
			||||||
 | 
					            pass
 | 
				
			||||||
 | 
					        elif not all_packages:
 | 
				
			||||||
 | 
					            # Previous components had no packages, but this one does - adopt these packages
 | 
				
			||||||
 | 
					            all_packages = comp_packages
 | 
				
			||||||
 | 
					        elif comp_packages != all_packages:
 | 
				
			||||||
 | 
					            # Both have packages but they differ - this is an error
 | 
				
			||||||
 | 
					            raise ValueError(
 | 
				
			||||||
 | 
					                f"Component {comp_name} has different packages than previous components. "
 | 
				
			||||||
 | 
					                f"Expected: {all_packages}, Got: {comp_packages}. "
 | 
				
			||||||
 | 
					                f"All components must use the same common bus configs to be merged."
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Handle $component_dir by replacing with absolute path
 | 
				
			||||||
 | 
					        # This allows components that use local file references to be grouped
 | 
				
			||||||
 | 
					        comp_abs_dir = str(comp_dir.absolute())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Save top-level substitutions BEFORE expanding packages
 | 
				
			||||||
 | 
					        # In ESPHome, top-level substitutions override package substitutions
 | 
				
			||||||
 | 
					        top_level_subs = (
 | 
				
			||||||
 | 
					            comp_data["substitutions"].copy()
 | 
				
			||||||
 | 
					            if "substitutions" in comp_data and comp_data["substitutions"] is not None
 | 
				
			||||||
 | 
					            else {}
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Expand packages - but we'll restore substitution priority after
 | 
				
			||||||
 | 
					        if "packages" in comp_data:
 | 
				
			||||||
 | 
					            packages_value = comp_data["packages"]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            if isinstance(packages_value, dict):
 | 
				
			||||||
 | 
					                # Dict format - check each package
 | 
				
			||||||
 | 
					                common_bus_packages = get_common_bus_packages()
 | 
				
			||||||
 | 
					                for pkg_name, pkg_value in list(packages_value.items()):
 | 
				
			||||||
 | 
					                    if pkg_name in common_bus_packages:
 | 
				
			||||||
 | 
					                        continue
 | 
				
			||||||
 | 
					                    if not isinstance(pkg_value, dict):
 | 
				
			||||||
 | 
					                        continue
 | 
				
			||||||
 | 
					                    # Component-specific package - expand its content into top level
 | 
				
			||||||
 | 
					                    comp_data = merge_config(comp_data, pkg_value)
 | 
				
			||||||
 | 
					            elif isinstance(packages_value, list):
 | 
				
			||||||
 | 
					                # List format - expand all package includes
 | 
				
			||||||
 | 
					                for pkg_value in packages_value:
 | 
				
			||||||
 | 
					                    if not isinstance(pkg_value, dict):
 | 
				
			||||||
 | 
					                        continue
 | 
				
			||||||
 | 
					                    comp_data = merge_config(comp_data, pkg_value)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # Remove all packages (common will be re-added at the end)
 | 
				
			||||||
 | 
					            del comp_data["packages"]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Restore top-level substitution priority
 | 
				
			||||||
 | 
					        # Top-level substitutions override any from packages
 | 
				
			||||||
 | 
					        if "substitutions" not in comp_data or comp_data["substitutions"] is None:
 | 
				
			||||||
 | 
					            comp_data["substitutions"] = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Merge: package subs as base, top-level subs override
 | 
				
			||||||
 | 
					        comp_data["substitutions"].update(top_level_subs)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Now prefix the final merged substitutions
 | 
				
			||||||
 | 
					        comp_data["substitutions"] = {
 | 
				
			||||||
 | 
					            f"{comp_name}_{sub_name}": sub_value
 | 
				
			||||||
 | 
					            for sub_name, sub_value in comp_data["substitutions"].items()
 | 
				
			||||||
 | 
					        }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Add component_dir substitution with absolute path for this component
 | 
				
			||||||
 | 
					        comp_data["substitutions"][f"{comp_name}_component_dir"] = comp_abs_dir
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Prefix substitution references throughout the config
 | 
				
			||||||
 | 
					        comp_data = prefix_substitutions_in_dict(comp_data, comp_name)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Use ESPHome's merge_config to merge this component into the result
 | 
				
			||||||
 | 
					        # merge_config handles list merging with ID-based deduplication automatically
 | 
				
			||||||
 | 
					        merged_config_data = merge_config(merged_config_data, comp_data)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Add packages back (only once, since they're identical)
 | 
				
			||||||
 | 
					    # IMPORTANT: Only re-add common bus packages (spi, i2c, uart, etc.)
 | 
				
			||||||
 | 
					    # Do NOT re-add component-specific packages as they contain unprefixed $component_dir refs
 | 
				
			||||||
 | 
					    if all_packages:
 | 
				
			||||||
 | 
					        first_comp_data = load_yaml_file(
 | 
				
			||||||
 | 
					            tests_dir / component_names[0] / f"test.{platform}.yaml"
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					        if "packages" in first_comp_data and isinstance(
 | 
				
			||||||
 | 
					            first_comp_data["packages"], dict
 | 
				
			||||||
 | 
					        ):
 | 
				
			||||||
 | 
					            # Filter to only include common bus packages
 | 
				
			||||||
 | 
					            # Only dict format can contain common bus packages
 | 
				
			||||||
 | 
					            common_bus_packages = get_common_bus_packages()
 | 
				
			||||||
 | 
					            filtered_packages = {
 | 
				
			||||||
 | 
					                name: value
 | 
				
			||||||
 | 
					                for name, value in first_comp_data["packages"].items()
 | 
				
			||||||
 | 
					                if name in common_bus_packages
 | 
				
			||||||
 | 
					            }
 | 
				
			||||||
 | 
					            if filtered_packages:
 | 
				
			||||||
 | 
					                merged_config_data["packages"] = filtered_packages
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Deduplicate items with same ID (keeps first occurrence)
 | 
				
			||||||
 | 
					    merged_config_data = deduplicate_by_id(merged_config_data)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Remove esphome section since it will be provided by the wrapper file
 | 
				
			||||||
 | 
					    # The wrapper file includes this merged config via packages and provides
 | 
				
			||||||
 | 
					    # the proper esphome: section with name, platform, etc.
 | 
				
			||||||
 | 
					    if "esphome" in merged_config_data:
 | 
				
			||||||
 | 
					        del merged_config_data["esphome"]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Write merged config
 | 
				
			||||||
 | 
					    output_file.parent.mkdir(parents=True, exist_ok=True)
 | 
				
			||||||
 | 
					    yaml_content = yaml_util.dump(merged_config_data)
 | 
				
			||||||
 | 
					    output_file.write_text(yaml_content)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    print(f"Successfully merged {len(component_names)} components into {output_file}")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def main() -> None:
 | 
				
			||||||
 | 
					    """Main entry point."""
 | 
				
			||||||
 | 
					    parser = argparse.ArgumentParser(
 | 
				
			||||||
 | 
					        description="Merge multiple component test configs into a single file"
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--components",
 | 
				
			||||||
 | 
					        "-c",
 | 
				
			||||||
 | 
					        required=True,
 | 
				
			||||||
 | 
					        help="Comma-separated list of component names to merge",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--platform",
 | 
				
			||||||
 | 
					        "-p",
 | 
				
			||||||
 | 
					        required=True,
 | 
				
			||||||
 | 
					        help="Platform to merge for (e.g., esp32-ard)",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--output",
 | 
				
			||||||
 | 
					        "-o",
 | 
				
			||||||
 | 
					        required=True,
 | 
				
			||||||
 | 
					        type=Path,
 | 
				
			||||||
 | 
					        help="Output file path for merged config",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--tests-dir",
 | 
				
			||||||
 | 
					        type=Path,
 | 
				
			||||||
 | 
					        default=Path("tests/components"),
 | 
				
			||||||
 | 
					        help="Path to tests/components directory",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    args = parser.parse_args()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    component_names = [c.strip() for c in args.components.split(",")]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    try:
 | 
				
			||||||
 | 
					        merge_component_configs(
 | 
				
			||||||
 | 
					            component_names=component_names,
 | 
				
			||||||
 | 
					            platform=args.platform,
 | 
				
			||||||
 | 
					            tests_dir=args.tests_dir,
 | 
				
			||||||
 | 
					            output_file=args.output,
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					    except Exception as e:
 | 
				
			||||||
 | 
					        print(f"Error merging configs: {e}", file=sys.stderr)
 | 
				
			||||||
 | 
					        import traceback
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        traceback.print_exc()
 | 
				
			||||||
 | 
					        sys.exit(1)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					if __name__ == "__main__":
 | 
				
			||||||
 | 
					    main()
 | 
				
			||||||
							
								
								
									
										268
									
								
								script/split_components_for_ci.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										268
									
								
								script/split_components_for_ci.py
									
									
									
									
									
										Executable file
									
								
							@@ -0,0 +1,268 @@
 | 
				
			|||||||
 | 
					#!/usr/bin/env python3
 | 
				
			||||||
 | 
					"""Split components into batches with intelligent grouping.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					This script analyzes components to identify which ones share common bus configurations
 | 
				
			||||||
 | 
					and intelligently groups them into batches to maximize the efficiency of the
 | 
				
			||||||
 | 
					component grouping system in CI.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					Components with the same bus signature are placed in the same batch whenever possible,
 | 
				
			||||||
 | 
					allowing the test_build_components.py script to merge them into single builds.
 | 
				
			||||||
 | 
					"""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					from __future__ import annotations
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import argparse
 | 
				
			||||||
 | 
					from collections import defaultdict
 | 
				
			||||||
 | 
					import json
 | 
				
			||||||
 | 
					from pathlib import Path
 | 
				
			||||||
 | 
					import sys
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Add esphome to path
 | 
				
			||||||
 | 
					sys.path.insert(0, str(Path(__file__).parent.parent))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					from script.analyze_component_buses import (
 | 
				
			||||||
 | 
					    ISOLATED_COMPONENTS,
 | 
				
			||||||
 | 
					    NO_BUSES_SIGNATURE,
 | 
				
			||||||
 | 
					    analyze_all_components,
 | 
				
			||||||
 | 
					    create_grouping_signature,
 | 
				
			||||||
 | 
					)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Weighting for batch creation
 | 
				
			||||||
 | 
					# Isolated components can't be grouped/merged, so they count as 10x
 | 
				
			||||||
 | 
					# Groupable components can be merged into single builds, so they count as 1x
 | 
				
			||||||
 | 
					ISOLATED_WEIGHT = 10
 | 
				
			||||||
 | 
					GROUPABLE_WEIGHT = 1
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def has_test_files(component_name: str, tests_dir: Path) -> bool:
 | 
				
			||||||
 | 
					    """Check if a component has test files.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        component_name: Name of the component
 | 
				
			||||||
 | 
					        tests_dir: Path to tests/components directory
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        True if the component has test.*.yaml files
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    component_dir = tests_dir / component_name
 | 
				
			||||||
 | 
					    if not component_dir.exists() or not component_dir.is_dir():
 | 
				
			||||||
 | 
					        return False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Check for test.*.yaml files
 | 
				
			||||||
 | 
					    return any(component_dir.glob("test.*.yaml"))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def create_intelligent_batches(
 | 
				
			||||||
 | 
					    components: list[str],
 | 
				
			||||||
 | 
					    tests_dir: Path,
 | 
				
			||||||
 | 
					    batch_size: int = 40,
 | 
				
			||||||
 | 
					) -> list[list[str]]:
 | 
				
			||||||
 | 
					    """Create batches optimized for component grouping.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        components: List of component names to batch
 | 
				
			||||||
 | 
					        tests_dir: Path to tests/components directory
 | 
				
			||||||
 | 
					        batch_size: Target size for each batch
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        List of component batches (lists of component names)
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    # Filter out components without test files
 | 
				
			||||||
 | 
					    # Platform components like 'climate' and 'climate_ir' don't have test files
 | 
				
			||||||
 | 
					    components_with_tests = [
 | 
				
			||||||
 | 
					        comp for comp in components if has_test_files(comp, tests_dir)
 | 
				
			||||||
 | 
					    ]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Log filtered components to stderr for debugging
 | 
				
			||||||
 | 
					    if len(components_with_tests) < len(components):
 | 
				
			||||||
 | 
					        filtered_out = set(components) - set(components_with_tests)
 | 
				
			||||||
 | 
					        print(
 | 
				
			||||||
 | 
					            f"Note: Filtered {len(filtered_out)} components without test files: "
 | 
				
			||||||
 | 
					            f"{', '.join(sorted(filtered_out))}",
 | 
				
			||||||
 | 
					            file=sys.stderr,
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Analyze all components to get their bus signatures
 | 
				
			||||||
 | 
					    component_buses, non_groupable, _direct_bus_components = analyze_all_components(
 | 
				
			||||||
 | 
					        tests_dir
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Group components by their bus signature ONLY (ignore platform)
 | 
				
			||||||
 | 
					    # All platforms will be tested by test_build_components.py for each batch
 | 
				
			||||||
 | 
					    # Key: signature, Value: list of components
 | 
				
			||||||
 | 
					    signature_groups: dict[str, list[str]] = defaultdict(list)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    for component in components_with_tests:
 | 
				
			||||||
 | 
					        # Components that can't be grouped get unique signatures
 | 
				
			||||||
 | 
					        # This includes both manually curated ISOLATED_COMPONENTS and
 | 
				
			||||||
 | 
					        # automatically detected non_groupable components
 | 
				
			||||||
 | 
					        # These can share a batch/runner but won't be grouped/merged
 | 
				
			||||||
 | 
					        if component in ISOLATED_COMPONENTS or component in non_groupable:
 | 
				
			||||||
 | 
					            signature_groups[f"isolated_{component}"].append(component)
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Get signature from any platform (they should all have the same buses)
 | 
				
			||||||
 | 
					        # Components not in component_buses were filtered out by has_test_files check
 | 
				
			||||||
 | 
					        comp_platforms = component_buses[component]
 | 
				
			||||||
 | 
					        for platform, buses in comp_platforms.items():
 | 
				
			||||||
 | 
					            if buses:
 | 
				
			||||||
 | 
					                signature = create_grouping_signature({platform: buses}, platform)
 | 
				
			||||||
 | 
					                # Group by signature only - platform doesn't matter for batching
 | 
				
			||||||
 | 
					                signature_groups[signature].append(component)
 | 
				
			||||||
 | 
					                break  # Only use first platform for grouping
 | 
				
			||||||
 | 
					        else:
 | 
				
			||||||
 | 
					            # No buses found for any platform - can be grouped together
 | 
				
			||||||
 | 
					            signature_groups[NO_BUSES_SIGNATURE].append(component)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Create batches by keeping signature groups together
 | 
				
			||||||
 | 
					    # Components with the same signature stay in the same batches
 | 
				
			||||||
 | 
					    batches = []
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Sort signature groups to prioritize groupable components
 | 
				
			||||||
 | 
					    # 1. Put "isolated_*" signatures last (can't be grouped with others)
 | 
				
			||||||
 | 
					    # 2. Sort groupable signatures by size (largest first)
 | 
				
			||||||
 | 
					    # 3. "no_buses" components CAN be grouped together
 | 
				
			||||||
 | 
					    def sort_key(item):
 | 
				
			||||||
 | 
					        signature, components = item
 | 
				
			||||||
 | 
					        is_isolated = signature.startswith("isolated_")
 | 
				
			||||||
 | 
					        # Put "isolated_*" last (1), groupable first (0)
 | 
				
			||||||
 | 
					        # Within each category, sort by size (largest first)
 | 
				
			||||||
 | 
					        return (is_isolated, -len(components))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    sorted_groups = sorted(signature_groups.items(), key=sort_key)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Strategy: Create batches using weighted sizes
 | 
				
			||||||
 | 
					    # - Isolated components count as 10x (since they can't be grouped/merged)
 | 
				
			||||||
 | 
					    # - Groupable components count as 1x (can be merged into single builds)
 | 
				
			||||||
 | 
					    # - This distributes isolated components across more runners
 | 
				
			||||||
 | 
					    # - Ensures each runner has a good mix of groupable vs isolated components
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    current_batch = []
 | 
				
			||||||
 | 
					    current_weight = 0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    for signature, group_components in sorted_groups:
 | 
				
			||||||
 | 
					        is_isolated = signature.startswith("isolated_")
 | 
				
			||||||
 | 
					        weight_per_component = ISOLATED_WEIGHT if is_isolated else GROUPABLE_WEIGHT
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        for component in group_components:
 | 
				
			||||||
 | 
					            # Check if adding this component would exceed the batch size
 | 
				
			||||||
 | 
					            if current_weight + weight_per_component > batch_size and current_batch:
 | 
				
			||||||
 | 
					                # Start a new batch
 | 
				
			||||||
 | 
					                batches.append(current_batch)
 | 
				
			||||||
 | 
					                current_batch = []
 | 
				
			||||||
 | 
					                current_weight = 0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # Add component to current batch
 | 
				
			||||||
 | 
					            current_batch.append(component)
 | 
				
			||||||
 | 
					            current_weight += weight_per_component
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Don't forget the last batch
 | 
				
			||||||
 | 
					    if current_batch:
 | 
				
			||||||
 | 
					        batches.append(current_batch)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return batches
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def main() -> int:
 | 
				
			||||||
 | 
					    """Main entry point."""
 | 
				
			||||||
 | 
					    parser = argparse.ArgumentParser(
 | 
				
			||||||
 | 
					        description="Split components into intelligent batches for CI testing"
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--components",
 | 
				
			||||||
 | 
					        "-c",
 | 
				
			||||||
 | 
					        required=True,
 | 
				
			||||||
 | 
					        help="JSON array of component names",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--batch-size",
 | 
				
			||||||
 | 
					        "-b",
 | 
				
			||||||
 | 
					        type=int,
 | 
				
			||||||
 | 
					        default=40,
 | 
				
			||||||
 | 
					        help="Target batch size (default: 40, weighted)",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--tests-dir",
 | 
				
			||||||
 | 
					        type=Path,
 | 
				
			||||||
 | 
					        default=Path("tests/components"),
 | 
				
			||||||
 | 
					        help="Path to tests/components directory",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--output",
 | 
				
			||||||
 | 
					        "-o",
 | 
				
			||||||
 | 
					        choices=["json", "github"],
 | 
				
			||||||
 | 
					        default="github",
 | 
				
			||||||
 | 
					        help="Output format (json or github for GitHub Actions)",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    args = parser.parse_args()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Parse component list from JSON
 | 
				
			||||||
 | 
					    try:
 | 
				
			||||||
 | 
					        components = json.loads(args.components)
 | 
				
			||||||
 | 
					    except json.JSONDecodeError as e:
 | 
				
			||||||
 | 
					        print(f"Error parsing components JSON: {e}", file=sys.stderr)
 | 
				
			||||||
 | 
					        return 1
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if not isinstance(components, list):
 | 
				
			||||||
 | 
					        print("Components must be a JSON array", file=sys.stderr)
 | 
				
			||||||
 | 
					        return 1
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Create intelligent batches
 | 
				
			||||||
 | 
					    batches = create_intelligent_batches(
 | 
				
			||||||
 | 
					        components=components,
 | 
				
			||||||
 | 
					        tests_dir=args.tests_dir,
 | 
				
			||||||
 | 
					        batch_size=args.batch_size,
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Convert batches to space-separated strings for CI
 | 
				
			||||||
 | 
					    batch_strings = [" ".join(batch) for batch in batches]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if args.output == "json":
 | 
				
			||||||
 | 
					        # Output as JSON array
 | 
				
			||||||
 | 
					        print(json.dumps(batch_strings))
 | 
				
			||||||
 | 
					    else:
 | 
				
			||||||
 | 
					        # Output for GitHub Actions (set output)
 | 
				
			||||||
 | 
					        output_json = json.dumps(batch_strings)
 | 
				
			||||||
 | 
					        print(f"components={output_json}")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Print summary to stderr so it shows in CI logs
 | 
				
			||||||
 | 
					    # Count actual components being batched
 | 
				
			||||||
 | 
					    actual_components = sum(len(batch.split()) for batch in batch_strings)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Re-analyze to get isolated component counts for summary
 | 
				
			||||||
 | 
					    _, non_groupable, _ = analyze_all_components(args.tests_dir)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Count isolated vs groupable components
 | 
				
			||||||
 | 
					    all_batched_components = [comp for batch in batches for comp in batch]
 | 
				
			||||||
 | 
					    isolated_count = sum(
 | 
				
			||||||
 | 
					        1
 | 
				
			||||||
 | 
					        for comp in all_batched_components
 | 
				
			||||||
 | 
					        if comp in ISOLATED_COMPONENTS or comp in non_groupable
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    groupable_count = actual_components - isolated_count
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    print("\n=== Intelligent Batch Summary ===", file=sys.stderr)
 | 
				
			||||||
 | 
					    print(f"Total components requested: {len(components)}", file=sys.stderr)
 | 
				
			||||||
 | 
					    print(f"Components with test files: {actual_components}", file=sys.stderr)
 | 
				
			||||||
 | 
					    print(f"  - Groupable (weight=1): {groupable_count}", file=sys.stderr)
 | 
				
			||||||
 | 
					    print(f"  - Isolated (weight=10): {isolated_count}", file=sys.stderr)
 | 
				
			||||||
 | 
					    if actual_components < len(components):
 | 
				
			||||||
 | 
					        print(
 | 
				
			||||||
 | 
					            f"Components skipped (no test files): {len(components) - actual_components}",
 | 
				
			||||||
 | 
					            file=sys.stderr,
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					    print(f"Number of batches: {len(batches)}", file=sys.stderr)
 | 
				
			||||||
 | 
					    print(f"Batch size target (weighted): {args.batch_size}", file=sys.stderr)
 | 
				
			||||||
 | 
					    if len(batches) > 0:
 | 
				
			||||||
 | 
					        print(
 | 
				
			||||||
 | 
					            f"Average components per batch: {actual_components / len(batches):.1f}",
 | 
				
			||||||
 | 
					            file=sys.stderr,
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					    print(file=sys.stderr)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return 0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					if __name__ == "__main__":
 | 
				
			||||||
 | 
					    sys.exit(main())
 | 
				
			||||||
@@ -1,106 +0,0 @@
 | 
				
			|||||||
#!/usr/bin/env bash
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
set -e
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
help() {
 | 
					 | 
				
			||||||
  echo "Usage: $0 [-e <config|compile|clean>] [-c <string>] [-t <string>]" 1>&2
 | 
					 | 
				
			||||||
  echo 1>&2
 | 
					 | 
				
			||||||
  echo "  - e - Parameter for esphome command. Default compile. Common alternative is config." 1>&2
 | 
					 | 
				
			||||||
  echo "  - c - Component folder name to test. Default *. E.g. '-c logger'." 1>&2
 | 
					 | 
				
			||||||
  echo "  - t - Target name to test. Put '-t list' to display all possibilities. E.g. '-t esp32-s2-idf-51'." 1>&2
 | 
					 | 
				
			||||||
  exit 1
 | 
					 | 
				
			||||||
}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# Parse parameter:
 | 
					 | 
				
			||||||
# - `e` - Parameter for `esphome` command. Default `compile`. Common alternative is `config`.
 | 
					 | 
				
			||||||
# - `c` - Component folder name to test. Default `*`.
 | 
					 | 
				
			||||||
esphome_command="compile"
 | 
					 | 
				
			||||||
target_component="*"
 | 
					 | 
				
			||||||
while getopts e:c:t: flag
 | 
					 | 
				
			||||||
do
 | 
					 | 
				
			||||||
    case $flag in
 | 
					 | 
				
			||||||
        e) esphome_command=${OPTARG};;
 | 
					 | 
				
			||||||
        c) target_component=${OPTARG};;
 | 
					 | 
				
			||||||
        t) requested_target_platform=${OPTARG};;
 | 
					 | 
				
			||||||
        \?) help;;
 | 
					 | 
				
			||||||
    esac
 | 
					 | 
				
			||||||
done
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
cd "$(dirname "$0")/.."
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
if ! [ -d "./tests/test_build_components/build" ]; then
 | 
					 | 
				
			||||||
  mkdir ./tests/test_build_components/build
 | 
					 | 
				
			||||||
fi
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
start_esphome() {
 | 
					 | 
				
			||||||
  if [ -n "$requested_target_platform" ] && [ "$requested_target_platform" != "$target_platform_with_version" ]; then
 | 
					 | 
				
			||||||
    echo "Skipping $target_platform_with_version"
 | 
					 | 
				
			||||||
    return
 | 
					 | 
				
			||||||
  fi
 | 
					 | 
				
			||||||
  # create dynamic yaml file in `build` folder.
 | 
					 | 
				
			||||||
  # `./tests/test_build_components/build/[target_component].[test_name].[target_platform_with_version].yaml`
 | 
					 | 
				
			||||||
  component_test_file="./tests/test_build_components/build/$target_component.$test_name.$target_platform_with_version.yaml"
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  cp $target_platform_file $component_test_file
 | 
					 | 
				
			||||||
  if [[ "$OSTYPE" == "darwin"* ]]; then
 | 
					 | 
				
			||||||
    # macOS sed is...different
 | 
					 | 
				
			||||||
    sed -i '' "s!\$component_test_file!../../.$f!g" $component_test_file
 | 
					 | 
				
			||||||
  else
 | 
					 | 
				
			||||||
    sed -i "s!\$component_test_file!../../.$f!g" $component_test_file
 | 
					 | 
				
			||||||
  fi
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  # Start esphome process
 | 
					 | 
				
			||||||
  echo "> [$target_component] [$test_name] [$target_platform_with_version]"
 | 
					 | 
				
			||||||
  set -x
 | 
					 | 
				
			||||||
  # TODO: Validate escape of Command line substitution value
 | 
					 | 
				
			||||||
  python3 -m esphome -s component_name $target_component -s component_dir ../../components/$target_component -s test_name $test_name -s target_platform $target_platform $esphome_command $component_test_file
 | 
					 | 
				
			||||||
  { set +x; } 2>/dev/null
 | 
					 | 
				
			||||||
}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# Find all test yaml files.
 | 
					 | 
				
			||||||
# - `./tests/components/[target_component]/[test_name].[target_platform].yaml`
 | 
					 | 
				
			||||||
# - `./tests/components/[target_component]/[test_name].all.yaml`
 | 
					 | 
				
			||||||
for f in ./tests/components/$target_component/*.*.yaml; do
 | 
					 | 
				
			||||||
  [ -f "$f" ] || continue
 | 
					 | 
				
			||||||
  IFS='/' read -r -a folder_name <<< "$f"
 | 
					 | 
				
			||||||
  target_component="${folder_name[3]}"
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  IFS='.' read -r -a file_name <<< "${folder_name[4]}"
 | 
					 | 
				
			||||||
  test_name="${file_name[0]}"
 | 
					 | 
				
			||||||
  target_platform="${file_name[1]}"
 | 
					 | 
				
			||||||
  file_name_parts=${#file_name[@]}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  if [ "$target_platform" = "all" ] || [ $file_name_parts = 2 ]; then
 | 
					 | 
				
			||||||
    # Test has *not* defined a specific target platform. Need to run tests for all possible target platforms.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    for target_platform_file in ./tests/test_build_components/build_components_base.*.yaml; do
 | 
					 | 
				
			||||||
      IFS='/' read -r -a folder_name <<< "$target_platform_file"
 | 
					 | 
				
			||||||
      IFS='.' read -r -a file_name <<< "${folder_name[3]}"
 | 
					 | 
				
			||||||
      target_platform="${file_name[1]}"
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
      start_esphome
 | 
					 | 
				
			||||||
    done
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  else
 | 
					 | 
				
			||||||
    # Test has defined a specific target platform.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # Validate we have a base test yaml for selected platform.
 | 
					 | 
				
			||||||
    # The target_platform is sourced from the following location.
 | 
					 | 
				
			||||||
    # 1. `./tests/test_build_components/build_components_base.[target_platform].yaml`
 | 
					 | 
				
			||||||
    # 2. `./tests/test_build_components/build_components_base.[target_platform]-ard.yaml`
 | 
					 | 
				
			||||||
    target_platform_file="./tests/test_build_components/build_components_base.$target_platform.yaml"
 | 
					 | 
				
			||||||
    if ! [ -f "$target_platform_file" ]; then
 | 
					 | 
				
			||||||
      echo "No base test file [./tests/test_build_components/build_components_base.$target_platform.yaml] for component test [$f] found."
 | 
					 | 
				
			||||||
      exit 1
 | 
					 | 
				
			||||||
    fi
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    for target_platform_file in ./tests/test_build_components/build_components_base.$target_platform*.yaml; do
 | 
					 | 
				
			||||||
      # trim off "./tests/test_build_components/build_components_base." prefix
 | 
					 | 
				
			||||||
      target_platform_with_version=${target_platform_file:52}
 | 
					 | 
				
			||||||
      # ...now remove suffix starting with "." leaving just the test target hardware and software platform (possibly with version)
 | 
					 | 
				
			||||||
      # For example: "esp32-s3-idf-50"
 | 
					 | 
				
			||||||
      target_platform_with_version=${target_platform_with_version%.*}
 | 
					 | 
				
			||||||
      start_esphome
 | 
					 | 
				
			||||||
    done
 | 
					 | 
				
			||||||
  fi
 | 
					 | 
				
			||||||
done
 | 
					 | 
				
			||||||
							
								
								
									
										1
									
								
								script/test_build_components
									
									
									
									
									
										Symbolic link
									
								
							
							
						
						
									
										1
									
								
								script/test_build_components
									
									
									
									
									
										Symbolic link
									
								
							@@ -0,0 +1 @@
 | 
				
			|||||||
 | 
					test_build_components.py
 | 
				
			||||||
							
								
								
									
										931
									
								
								script/test_build_components.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										931
									
								
								script/test_build_components.py
									
									
									
									
									
										Executable file
									
								
							@@ -0,0 +1,931 @@
 | 
				
			|||||||
 | 
					#!/usr/bin/env python3
 | 
				
			||||||
 | 
					"""Test ESPHome component builds with intelligent grouping.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					This script replaces the bash test_build_components script with Python,
 | 
				
			||||||
 | 
					adding support for intelligent component grouping based on shared bus
 | 
				
			||||||
 | 
					configurations to reduce CI build time.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					Features:
 | 
				
			||||||
 | 
					- Analyzes components for shared common bus configs
 | 
				
			||||||
 | 
					- Groups compatible components together
 | 
				
			||||||
 | 
					- Merges configs for grouped components
 | 
				
			||||||
 | 
					- Uses --testing-mode for grouped tests
 | 
				
			||||||
 | 
					- Maintains backward compatibility with single component testing
 | 
				
			||||||
 | 
					"""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					from __future__ import annotations
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import argparse
 | 
				
			||||||
 | 
					from collections import defaultdict
 | 
				
			||||||
 | 
					import hashlib
 | 
				
			||||||
 | 
					import os
 | 
				
			||||||
 | 
					from pathlib import Path
 | 
				
			||||||
 | 
					import subprocess
 | 
				
			||||||
 | 
					import sys
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Add esphome to path
 | 
				
			||||||
 | 
					sys.path.insert(0, str(Path(__file__).parent.parent))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# pylint: disable=wrong-import-position
 | 
				
			||||||
 | 
					from script.analyze_component_buses import (
 | 
				
			||||||
 | 
					    BASE_BUS_COMPONENTS,
 | 
				
			||||||
 | 
					    ISOLATED_COMPONENTS,
 | 
				
			||||||
 | 
					    NO_BUSES_SIGNATURE,
 | 
				
			||||||
 | 
					    analyze_all_components,
 | 
				
			||||||
 | 
					    create_grouping_signature,
 | 
				
			||||||
 | 
					    is_platform_component,
 | 
				
			||||||
 | 
					    uses_local_file_references,
 | 
				
			||||||
 | 
					)
 | 
				
			||||||
 | 
					from script.merge_component_configs import merge_component_configs
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Platform-specific maximum group sizes
 | 
				
			||||||
 | 
					# ESP8266 has limited IRAM and can't handle large component groups
 | 
				
			||||||
 | 
					PLATFORM_MAX_GROUP_SIZE = {
 | 
				
			||||||
 | 
					    "esp8266-ard": 10,  # ESP8266 Arduino has limited IRAM
 | 
				
			||||||
 | 
					    "esp8266-idf": 10,  # ESP8266 IDF also has limited IRAM
 | 
				
			||||||
 | 
					    # BK72xx now uses BK7252 board (1.62MB flash vs 1.03MB) - no limit needed
 | 
				
			||||||
 | 
					    # Other platforms can handle larger groups
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def show_disk_space_if_ci(esphome_command: str) -> None:
 | 
				
			||||||
 | 
					    """Show disk space usage if running in CI during compile.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        esphome_command: The esphome command being run (config/compile/clean)
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    if os.environ.get("GITHUB_ACTIONS") and esphome_command == "compile":
 | 
				
			||||||
 | 
					        print("\n" + "=" * 80)
 | 
				
			||||||
 | 
					        print("Disk Space After Build:")
 | 
				
			||||||
 | 
					        print("=" * 80)
 | 
				
			||||||
 | 
					        subprocess.run(["df", "-h"], check=False)
 | 
				
			||||||
 | 
					        print("=" * 80 + "\n")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def find_component_tests(
 | 
				
			||||||
 | 
					    components_dir: Path, component_pattern: str = "*"
 | 
				
			||||||
 | 
					) -> dict[str, list[Path]]:
 | 
				
			||||||
 | 
					    """Find all component test files.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        components_dir: Path to tests/components directory
 | 
				
			||||||
 | 
					        component_pattern: Glob pattern for component names
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Dictionary mapping component name to list of test files
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    component_tests = defaultdict(list)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    for comp_dir in components_dir.glob(component_pattern):
 | 
				
			||||||
 | 
					        if not comp_dir.is_dir():
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        for test_file in comp_dir.glob("test.*.yaml"):
 | 
				
			||||||
 | 
					            component_tests[comp_dir.name].append(test_file)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return dict(component_tests)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def parse_test_filename(test_file: Path) -> tuple[str, str]:
 | 
				
			||||||
 | 
					    """Parse test filename to extract test name and platform.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        test_file: Path to test file
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Tuple of (test_name, platform)
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    parts = test_file.stem.split(".")
 | 
				
			||||||
 | 
					    if len(parts) == 2:
 | 
				
			||||||
 | 
					        return parts[0], parts[1]  # test, platform
 | 
				
			||||||
 | 
					    return parts[0], "all"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def get_platform_base_files(base_dir: Path) -> dict[str, list[Path]]:
 | 
				
			||||||
 | 
					    """Get all platform base files.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        base_dir: Path to test_build_components directory
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Dictionary mapping platform to list of base files (for version variants)
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    platform_files = defaultdict(list)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    for base_file in base_dir.glob("build_components_base.*.yaml"):
 | 
				
			||||||
 | 
					        # Extract platform from filename
 | 
				
			||||||
 | 
					        # e.g., build_components_base.esp32-idf.yaml -> esp32-idf
 | 
				
			||||||
 | 
					        # or build_components_base.esp32-idf-50.yaml -> esp32-idf
 | 
				
			||||||
 | 
					        filename = base_file.stem
 | 
				
			||||||
 | 
					        parts = filename.replace("build_components_base.", "").split("-")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Platform is everything before version number (if present)
 | 
				
			||||||
 | 
					        # Check if last part is a number (version)
 | 
				
			||||||
 | 
					        platform = "-".join(parts[:-1]) if parts[-1].isdigit() else "-".join(parts)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        platform_files[platform].append(base_file)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return dict(platform_files)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def extract_platform_with_version(base_file: Path) -> str:
 | 
				
			||||||
 | 
					    """Extract platform with version from base filename.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        base_file: Path to base file
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Platform with version (e.g., "esp32-idf-50" or "esp32-idf")
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    # Remove "build_components_base." prefix and ".yaml" suffix
 | 
				
			||||||
 | 
					    return base_file.stem.replace("build_components_base.", "")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def run_esphome_test(
 | 
				
			||||||
 | 
					    component: str,
 | 
				
			||||||
 | 
					    test_file: Path,
 | 
				
			||||||
 | 
					    platform: str,
 | 
				
			||||||
 | 
					    platform_with_version: str,
 | 
				
			||||||
 | 
					    base_file: Path,
 | 
				
			||||||
 | 
					    build_dir: Path,
 | 
				
			||||||
 | 
					    esphome_command: str,
 | 
				
			||||||
 | 
					    continue_on_fail: bool,
 | 
				
			||||||
 | 
					    use_testing_mode: bool = False,
 | 
				
			||||||
 | 
					) -> tuple[bool, str]:
 | 
				
			||||||
 | 
					    """Run esphome test for a single component.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        component: Component name
 | 
				
			||||||
 | 
					        test_file: Path to component test file
 | 
				
			||||||
 | 
					        platform: Platform name (e.g., "esp32-idf")
 | 
				
			||||||
 | 
					        platform_with_version: Platform with version (e.g., "esp32-idf-50")
 | 
				
			||||||
 | 
					        base_file: Path to platform base file
 | 
				
			||||||
 | 
					        build_dir: Path to build directory
 | 
				
			||||||
 | 
					        esphome_command: ESPHome command (config/compile)
 | 
				
			||||||
 | 
					        continue_on_fail: Whether to continue on failure
 | 
				
			||||||
 | 
					        use_testing_mode: Whether to use --testing-mode flag
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Tuple of (success status, command string)
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    test_name = test_file.stem.split(".")[0]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Create dynamic test file in build directory
 | 
				
			||||||
 | 
					    output_file = build_dir / f"{component}.{test_name}.{platform_with_version}.yaml"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Copy base file and substitute component test file reference
 | 
				
			||||||
 | 
					    base_content = base_file.read_text()
 | 
				
			||||||
 | 
					    # Get relative path from build dir to test file
 | 
				
			||||||
 | 
					    repo_root = Path(__file__).parent.parent
 | 
				
			||||||
 | 
					    component_test_ref = f"../../{test_file.relative_to(repo_root / 'tests')}"
 | 
				
			||||||
 | 
					    output_content = base_content.replace("$component_test_file", component_test_ref)
 | 
				
			||||||
 | 
					    output_file.write_text(output_content)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Build esphome command
 | 
				
			||||||
 | 
					    cmd = [
 | 
				
			||||||
 | 
					        sys.executable,
 | 
				
			||||||
 | 
					        "-m",
 | 
				
			||||||
 | 
					        "esphome",
 | 
				
			||||||
 | 
					    ]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Add --testing-mode if needed (must be before subcommand)
 | 
				
			||||||
 | 
					    if use_testing_mode:
 | 
				
			||||||
 | 
					        cmd.append("--testing-mode")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Add substitutions
 | 
				
			||||||
 | 
					    cmd.extend(
 | 
				
			||||||
 | 
					        [
 | 
				
			||||||
 | 
					            "-s",
 | 
				
			||||||
 | 
					            "component_name",
 | 
				
			||||||
 | 
					            component,
 | 
				
			||||||
 | 
					            "-s",
 | 
				
			||||||
 | 
					            "component_dir",
 | 
				
			||||||
 | 
					            f"../../components/{component}",
 | 
				
			||||||
 | 
					            "-s",
 | 
				
			||||||
 | 
					            "test_name",
 | 
				
			||||||
 | 
					            test_name,
 | 
				
			||||||
 | 
					            "-s",
 | 
				
			||||||
 | 
					            "target_platform",
 | 
				
			||||||
 | 
					            platform,
 | 
				
			||||||
 | 
					        ]
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Add command and config file
 | 
				
			||||||
 | 
					    cmd.extend([esphome_command, str(output_file)])
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Build command string for display/logging
 | 
				
			||||||
 | 
					    cmd_str = " ".join(cmd)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Run command
 | 
				
			||||||
 | 
					    print(f"> [{component}] [{test_name}] [{platform_with_version}]")
 | 
				
			||||||
 | 
					    if use_testing_mode:
 | 
				
			||||||
 | 
					        print("  (using --testing-mode)")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    try:
 | 
				
			||||||
 | 
					        result = subprocess.run(cmd, check=False)
 | 
				
			||||||
 | 
					        success = result.returncode == 0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Show disk space after build in CI during compile
 | 
				
			||||||
 | 
					        show_disk_space_if_ci(esphome_command)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if not success and not continue_on_fail:
 | 
				
			||||||
 | 
					            # Print command immediately for failed tests
 | 
				
			||||||
 | 
					            print(f"\n{'=' * 80}")
 | 
				
			||||||
 | 
					            print("FAILED - Command to reproduce:")
 | 
				
			||||||
 | 
					            print(f"{'=' * 80}")
 | 
				
			||||||
 | 
					            print(cmd_str)
 | 
				
			||||||
 | 
					            print()
 | 
				
			||||||
 | 
					            raise subprocess.CalledProcessError(result.returncode, cmd)
 | 
				
			||||||
 | 
					        return success, cmd_str
 | 
				
			||||||
 | 
					    except subprocess.CalledProcessError:
 | 
				
			||||||
 | 
					        # Re-raise if we're not continuing on fail
 | 
				
			||||||
 | 
					        if not continue_on_fail:
 | 
				
			||||||
 | 
					            raise
 | 
				
			||||||
 | 
					        return False, cmd_str
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def run_grouped_test(
 | 
				
			||||||
 | 
					    components: list[str],
 | 
				
			||||||
 | 
					    platform: str,
 | 
				
			||||||
 | 
					    platform_with_version: str,
 | 
				
			||||||
 | 
					    base_file: Path,
 | 
				
			||||||
 | 
					    build_dir: Path,
 | 
				
			||||||
 | 
					    tests_dir: Path,
 | 
				
			||||||
 | 
					    esphome_command: str,
 | 
				
			||||||
 | 
					    continue_on_fail: bool,
 | 
				
			||||||
 | 
					) -> tuple[bool, str]:
 | 
				
			||||||
 | 
					    """Run esphome test for a group of components with shared bus configs.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        components: List of component names to test together
 | 
				
			||||||
 | 
					        platform: Platform name (e.g., "esp32-idf")
 | 
				
			||||||
 | 
					        platform_with_version: Platform with version (e.g., "esp32-idf-50")
 | 
				
			||||||
 | 
					        base_file: Path to platform base file
 | 
				
			||||||
 | 
					        build_dir: Path to build directory
 | 
				
			||||||
 | 
					        tests_dir: Path to tests/components directory
 | 
				
			||||||
 | 
					        esphome_command: ESPHome command (config/compile)
 | 
				
			||||||
 | 
					        continue_on_fail: Whether to continue on failure
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Tuple of (success status, command string)
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    # Create merged config
 | 
				
			||||||
 | 
					    group_name = "_".join(components[:3])  # Use first 3 components for name
 | 
				
			||||||
 | 
					    if len(components) > 3:
 | 
				
			||||||
 | 
					        group_name += f"_plus_{len(components) - 3}"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Create unique device name by hashing sorted component list + platform
 | 
				
			||||||
 | 
					    # This prevents conflicts when different component groups are tested
 | 
				
			||||||
 | 
					    sorted_components = sorted(components)
 | 
				
			||||||
 | 
					    hash_input = "_".join(sorted_components) + "_" + platform
 | 
				
			||||||
 | 
					    group_hash = hashlib.md5(hash_input.encode()).hexdigest()[:8]
 | 
				
			||||||
 | 
					    device_name = f"comptest{platform.replace('-', '')}{group_hash}"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    merged_config_file = build_dir / f"merged_{group_name}.{platform_with_version}.yaml"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    try:
 | 
				
			||||||
 | 
					        merge_component_configs(
 | 
				
			||||||
 | 
					            component_names=components,
 | 
				
			||||||
 | 
					            platform=platform_with_version,
 | 
				
			||||||
 | 
					            tests_dir=tests_dir,
 | 
				
			||||||
 | 
					            output_file=merged_config_file,
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					    except Exception as e:  # pylint: disable=broad-exception-caught
 | 
				
			||||||
 | 
					        print(f"Error merging configs for {components}: {e}")
 | 
				
			||||||
 | 
					        if not continue_on_fail:
 | 
				
			||||||
 | 
					            raise
 | 
				
			||||||
 | 
					        # Return empty command string since we failed before building the command
 | 
				
			||||||
 | 
					        return False, f"# Failed during config merge: {e}"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Create test file that includes merged config
 | 
				
			||||||
 | 
					    output_file = build_dir / f"test_{group_name}.{platform_with_version}.yaml"
 | 
				
			||||||
 | 
					    base_content = base_file.read_text()
 | 
				
			||||||
 | 
					    merged_ref = merged_config_file.name
 | 
				
			||||||
 | 
					    output_content = base_content.replace("$component_test_file", merged_ref)
 | 
				
			||||||
 | 
					    output_file.write_text(output_content)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Build esphome command with --testing-mode
 | 
				
			||||||
 | 
					    cmd = [
 | 
				
			||||||
 | 
					        sys.executable,
 | 
				
			||||||
 | 
					        "-m",
 | 
				
			||||||
 | 
					        "esphome",
 | 
				
			||||||
 | 
					        "--testing-mode",  # Required for grouped tests
 | 
				
			||||||
 | 
					        "-s",
 | 
				
			||||||
 | 
					        "component_name",
 | 
				
			||||||
 | 
					        device_name,  # Use unique hash-based device name
 | 
				
			||||||
 | 
					        "-s",
 | 
				
			||||||
 | 
					        "component_dir",
 | 
				
			||||||
 | 
					        "../../components",
 | 
				
			||||||
 | 
					        "-s",
 | 
				
			||||||
 | 
					        "test_name",
 | 
				
			||||||
 | 
					        "merged",
 | 
				
			||||||
 | 
					        "-s",
 | 
				
			||||||
 | 
					        "target_platform",
 | 
				
			||||||
 | 
					        platform,
 | 
				
			||||||
 | 
					        esphome_command,
 | 
				
			||||||
 | 
					        str(output_file),
 | 
				
			||||||
 | 
					    ]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Build command string for display/logging
 | 
				
			||||||
 | 
					    cmd_str = " ".join(cmd)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Run command
 | 
				
			||||||
 | 
					    components_str = ", ".join(components)
 | 
				
			||||||
 | 
					    print(f"> [GROUPED: {components_str}] [{platform_with_version}]")
 | 
				
			||||||
 | 
					    print("  (using --testing-mode)")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    try:
 | 
				
			||||||
 | 
					        result = subprocess.run(cmd, check=False)
 | 
				
			||||||
 | 
					        success = result.returncode == 0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Show disk space after build in CI during compile
 | 
				
			||||||
 | 
					        show_disk_space_if_ci(esphome_command)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if not success and not continue_on_fail:
 | 
				
			||||||
 | 
					            # Print command immediately for failed tests
 | 
				
			||||||
 | 
					            print(f"\n{'=' * 80}")
 | 
				
			||||||
 | 
					            print("FAILED - Command to reproduce:")
 | 
				
			||||||
 | 
					            print(f"{'=' * 80}")
 | 
				
			||||||
 | 
					            print(cmd_str)
 | 
				
			||||||
 | 
					            print()
 | 
				
			||||||
 | 
					            raise subprocess.CalledProcessError(result.returncode, cmd)
 | 
				
			||||||
 | 
					        return success, cmd_str
 | 
				
			||||||
 | 
					    except subprocess.CalledProcessError:
 | 
				
			||||||
 | 
					        # Re-raise if we're not continuing on fail
 | 
				
			||||||
 | 
					        if not continue_on_fail:
 | 
				
			||||||
 | 
					            raise
 | 
				
			||||||
 | 
					        return False, cmd_str
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def run_grouped_component_tests(
 | 
				
			||||||
 | 
					    all_tests: dict[str, list[Path]],
 | 
				
			||||||
 | 
					    platform_filter: str | None,
 | 
				
			||||||
 | 
					    platform_bases: dict[str, list[Path]],
 | 
				
			||||||
 | 
					    tests_dir: Path,
 | 
				
			||||||
 | 
					    build_dir: Path,
 | 
				
			||||||
 | 
					    esphome_command: str,
 | 
				
			||||||
 | 
					    continue_on_fail: bool,
 | 
				
			||||||
 | 
					) -> tuple[set[tuple[str, str]], list[str], list[str], dict[str, str]]:
 | 
				
			||||||
 | 
					    """Run grouped component tests.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        all_tests: Dictionary mapping component names to test files
 | 
				
			||||||
 | 
					        platform_filter: Optional platform to filter by
 | 
				
			||||||
 | 
					        platform_bases: Platform base files mapping
 | 
				
			||||||
 | 
					        tests_dir: Path to tests/components directory
 | 
				
			||||||
 | 
					        build_dir: Path to build directory
 | 
				
			||||||
 | 
					        esphome_command: ESPHome command (config/compile)
 | 
				
			||||||
 | 
					        continue_on_fail: Whether to continue on failure
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        Tuple of (tested_components, passed_tests, failed_tests, failed_commands)
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    tested_components = set()
 | 
				
			||||||
 | 
					    passed_tests = []
 | 
				
			||||||
 | 
					    failed_tests = []
 | 
				
			||||||
 | 
					    failed_commands = {}  # Map test_id to command string
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Group components by platform and bus signature
 | 
				
			||||||
 | 
					    grouped_components: dict[tuple[str, str], list[str]] = defaultdict(list)
 | 
				
			||||||
 | 
					    print("\n" + "=" * 80)
 | 
				
			||||||
 | 
					    print("Analyzing components for intelligent grouping...")
 | 
				
			||||||
 | 
					    print("=" * 80)
 | 
				
			||||||
 | 
					    component_buses, non_groupable, direct_bus_components = analyze_all_components(
 | 
				
			||||||
 | 
					        tests_dir
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Track why components can't be grouped (for detailed output)
 | 
				
			||||||
 | 
					    non_groupable_reasons = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Group by (platform, bus_signature)
 | 
				
			||||||
 | 
					    for component, platforms in component_buses.items():
 | 
				
			||||||
 | 
					        if component not in all_tests:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Skip components that must be tested in isolation
 | 
				
			||||||
 | 
					        # These are shown separately and should not be in non_groupable_reasons
 | 
				
			||||||
 | 
					        if component in ISOLATED_COMPONENTS:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Skip base bus components (these test the bus platforms themselves)
 | 
				
			||||||
 | 
					        if component in BASE_BUS_COMPONENTS:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Skip components that use local file references or direct bus configs
 | 
				
			||||||
 | 
					        if component in non_groupable:
 | 
				
			||||||
 | 
					            # Track the reason (using pre-calculated results to avoid expensive re-analysis)
 | 
				
			||||||
 | 
					            if component not in non_groupable_reasons:
 | 
				
			||||||
 | 
					                if component in direct_bus_components:
 | 
				
			||||||
 | 
					                    non_groupable_reasons[component] = (
 | 
				
			||||||
 | 
					                        "Defines buses directly (not via packages) - NEEDS MIGRATION"
 | 
				
			||||||
 | 
					                    )
 | 
				
			||||||
 | 
					                elif uses_local_file_references(tests_dir / component):
 | 
				
			||||||
 | 
					                    non_groupable_reasons[component] = (
 | 
				
			||||||
 | 
					                        "Uses local file references ($component_dir)"
 | 
				
			||||||
 | 
					                    )
 | 
				
			||||||
 | 
					                elif is_platform_component(tests_dir / component):
 | 
				
			||||||
 | 
					                    non_groupable_reasons[component] = (
 | 
				
			||||||
 | 
					                        "Platform component (abstract base class)"
 | 
				
			||||||
 | 
					                    )
 | 
				
			||||||
 | 
					                else:
 | 
				
			||||||
 | 
					                    non_groupable_reasons[component] = (
 | 
				
			||||||
 | 
					                        "Uses !extend or !remove directives"
 | 
				
			||||||
 | 
					                    )
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        for platform, buses in platforms.items():
 | 
				
			||||||
 | 
					            # Skip if platform doesn't match filter
 | 
				
			||||||
 | 
					            if platform_filter and not platform.startswith(platform_filter):
 | 
				
			||||||
 | 
					                continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # Create signature for this component's bus configuration
 | 
				
			||||||
 | 
					            # Components with no buses get NO_BUSES_SIGNATURE so they can be grouped together
 | 
				
			||||||
 | 
					            if buses:
 | 
				
			||||||
 | 
					                signature = create_grouping_signature({platform: buses}, platform)
 | 
				
			||||||
 | 
					            else:
 | 
				
			||||||
 | 
					                signature = NO_BUSES_SIGNATURE
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # Add to grouped components (including those with no buses)
 | 
				
			||||||
 | 
					            if signature:
 | 
				
			||||||
 | 
					                grouped_components[(platform, signature)].append(component)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Print detailed grouping plan
 | 
				
			||||||
 | 
					    print("\nGrouping Plan:")
 | 
				
			||||||
 | 
					    print("-" * 80)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Show isolated components (must test individually due to known issues)
 | 
				
			||||||
 | 
					    isolated_in_tests = [c for c in ISOLATED_COMPONENTS if c in all_tests]
 | 
				
			||||||
 | 
					    if isolated_in_tests:
 | 
				
			||||||
 | 
					        print(
 | 
				
			||||||
 | 
					            f"\n⚠ {len(isolated_in_tests)} components must be tested in isolation (known build issues):"
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					        for comp in sorted(isolated_in_tests):
 | 
				
			||||||
 | 
					            reason = ISOLATED_COMPONENTS[comp]
 | 
				
			||||||
 | 
					            print(f"  - {comp}: {reason}")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Show base bus components (test the bus platform implementations)
 | 
				
			||||||
 | 
					    base_bus_in_tests = [c for c in BASE_BUS_COMPONENTS if c in all_tests]
 | 
				
			||||||
 | 
					    if base_bus_in_tests:
 | 
				
			||||||
 | 
					        print(
 | 
				
			||||||
 | 
					            f"\n○ {len(base_bus_in_tests)} base bus platform components (tested individually):"
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					        for comp in sorted(base_bus_in_tests):
 | 
				
			||||||
 | 
					            print(f"  - {comp}")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Show excluded components with detailed reasons
 | 
				
			||||||
 | 
					    if non_groupable_reasons:
 | 
				
			||||||
 | 
					        excluded_in_tests = [c for c in non_groupable_reasons if c in all_tests]
 | 
				
			||||||
 | 
					        if excluded_in_tests:
 | 
				
			||||||
 | 
					            print(
 | 
				
			||||||
 | 
					                f"\n⚠ {len(excluded_in_tests)} components excluded from grouping (each needs individual build):"
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					            # Group by reason to show summary
 | 
				
			||||||
 | 
					            direct_bus = [
 | 
				
			||||||
 | 
					                c
 | 
				
			||||||
 | 
					                for c in excluded_in_tests
 | 
				
			||||||
 | 
					                if "NEEDS MIGRATION" in non_groupable_reasons.get(c, "")
 | 
				
			||||||
 | 
					            ]
 | 
				
			||||||
 | 
					            if direct_bus:
 | 
				
			||||||
 | 
					                print(
 | 
				
			||||||
 | 
					                    f"\n  ⚠⚠⚠ {len(direct_bus)} DEFINE BUSES DIRECTLY - NEED MIGRATION TO PACKAGES:"
 | 
				
			||||||
 | 
					                )
 | 
				
			||||||
 | 
					                for comp in sorted(direct_bus):
 | 
				
			||||||
 | 
					                    print(f"    - {comp}")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            other_reasons = [
 | 
				
			||||||
 | 
					                c
 | 
				
			||||||
 | 
					                for c in excluded_in_tests
 | 
				
			||||||
 | 
					                if "NEEDS MIGRATION" not in non_groupable_reasons.get(c, "")
 | 
				
			||||||
 | 
					            ]
 | 
				
			||||||
 | 
					            if other_reasons and len(other_reasons) <= 10:
 | 
				
			||||||
 | 
					                print("\n  Other non-groupable components:")
 | 
				
			||||||
 | 
					                for comp in sorted(other_reasons):
 | 
				
			||||||
 | 
					                    reason = non_groupable_reasons[comp]
 | 
				
			||||||
 | 
					                    print(f"    - {comp}: {reason}")
 | 
				
			||||||
 | 
					            elif other_reasons:
 | 
				
			||||||
 | 
					                print(
 | 
				
			||||||
 | 
					                    f"\n  Other non-groupable components: {len(other_reasons)} components"
 | 
				
			||||||
 | 
					                )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Distribute no_buses components into other groups to maximize efficiency
 | 
				
			||||||
 | 
					    # Components with no buses can merge with any bus group since they have no conflicting requirements
 | 
				
			||||||
 | 
					    no_buses_by_platform: dict[str, list[str]] = {}
 | 
				
			||||||
 | 
					    for (platform, signature), components in list(grouped_components.items()):
 | 
				
			||||||
 | 
					        if signature == NO_BUSES_SIGNATURE:
 | 
				
			||||||
 | 
					            no_buses_by_platform[platform] = components
 | 
				
			||||||
 | 
					            # Remove from grouped_components - we'll distribute them
 | 
				
			||||||
 | 
					            del grouped_components[(platform, signature)]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Distribute no_buses components into existing groups for each platform
 | 
				
			||||||
 | 
					    for platform, no_buses_comps in no_buses_by_platform.items():
 | 
				
			||||||
 | 
					        # Find all non-empty groups for this platform (excluding no_buses)
 | 
				
			||||||
 | 
					        platform_groups = [
 | 
				
			||||||
 | 
					            (sig, comps)
 | 
				
			||||||
 | 
					            for (plat, sig), comps in grouped_components.items()
 | 
				
			||||||
 | 
					            if plat == platform and sig != NO_BUSES_SIGNATURE
 | 
				
			||||||
 | 
					        ]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if platform_groups:
 | 
				
			||||||
 | 
					            # Distribute no_buses components round-robin across existing groups
 | 
				
			||||||
 | 
					            for i, comp in enumerate(no_buses_comps):
 | 
				
			||||||
 | 
					                sig, _ = platform_groups[i % len(platform_groups)]
 | 
				
			||||||
 | 
					                grouped_components[(platform, sig)].append(comp)
 | 
				
			||||||
 | 
					        else:
 | 
				
			||||||
 | 
					            # No other groups for this platform - keep no_buses components together
 | 
				
			||||||
 | 
					            grouped_components[(platform, NO_BUSES_SIGNATURE)] = no_buses_comps
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Split groups that exceed platform-specific maximum sizes
 | 
				
			||||||
 | 
					    # ESP8266 has limited IRAM and can't handle large component groups
 | 
				
			||||||
 | 
					    split_groups = {}
 | 
				
			||||||
 | 
					    for (platform, signature), components in list(grouped_components.items()):
 | 
				
			||||||
 | 
					        max_size = PLATFORM_MAX_GROUP_SIZE.get(platform)
 | 
				
			||||||
 | 
					        if max_size and len(components) > max_size:
 | 
				
			||||||
 | 
					            # Split this group into smaller groups
 | 
				
			||||||
 | 
					            print(
 | 
				
			||||||
 | 
					                f"\n  ℹ️ Splitting {platform} group (signature: {signature}) "
 | 
				
			||||||
 | 
					                f"from {len(components)} to max {max_size} components per group"
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					            # Remove original group
 | 
				
			||||||
 | 
					            del grouped_components[(platform, signature)]
 | 
				
			||||||
 | 
					            # Create split groups
 | 
				
			||||||
 | 
					            for i in range(0, len(components), max_size):
 | 
				
			||||||
 | 
					                split_components = components[i : i + max_size]
 | 
				
			||||||
 | 
					                # Create unique signature for each split group
 | 
				
			||||||
 | 
					                split_signature = f"{signature}_split{i // max_size + 1}"
 | 
				
			||||||
 | 
					                split_groups[(platform, split_signature)] = split_components
 | 
				
			||||||
 | 
					    # Add split groups back
 | 
				
			||||||
 | 
					    grouped_components.update(split_groups)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    groups_to_test = []
 | 
				
			||||||
 | 
					    individual_tests = set()  # Use set to avoid duplicates
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    for (platform, signature), components in sorted(grouped_components.items()):
 | 
				
			||||||
 | 
					        if len(components) > 1:
 | 
				
			||||||
 | 
					            groups_to_test.append((platform, signature, components))
 | 
				
			||||||
 | 
					        # Note: Don't add single-component groups to individual_tests here
 | 
				
			||||||
 | 
					        # They'll be added below when we check for ungrouped components
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Add components that weren't grouped on any platform
 | 
				
			||||||
 | 
					    for component in all_tests:
 | 
				
			||||||
 | 
					        if component not in [c for _, _, comps in groups_to_test for c in comps]:
 | 
				
			||||||
 | 
					            individual_tests.add(component)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if groups_to_test:
 | 
				
			||||||
 | 
					        print(f"\n✓ {len(groups_to_test)} groups will be tested together:")
 | 
				
			||||||
 | 
					        for platform, signature, components in groups_to_test:
 | 
				
			||||||
 | 
					            component_list = ", ".join(sorted(components))
 | 
				
			||||||
 | 
					            print(f"  [{platform}] [{signature}]: {component_list}")
 | 
				
			||||||
 | 
					            print(
 | 
				
			||||||
 | 
					                f"    → {len(components)} components in 1 build (saves {len(components) - 1} builds)"
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if individual_tests:
 | 
				
			||||||
 | 
					        print(f"\n○ {len(individual_tests)} components will be tested individually:")
 | 
				
			||||||
 | 
					        sorted_individual = sorted(individual_tests)
 | 
				
			||||||
 | 
					        for comp in sorted_individual[:10]:
 | 
				
			||||||
 | 
					            print(f"  - {comp}")
 | 
				
			||||||
 | 
					        if len(individual_tests) > 10:
 | 
				
			||||||
 | 
					            print(f"  ... and {len(individual_tests) - 10} more")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Calculate actual build counts based on test files, not component counts
 | 
				
			||||||
 | 
					    # Without grouping: every test file would be built separately
 | 
				
			||||||
 | 
					    total_test_files = sum(len(test_files) for test_files in all_tests.values())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # With grouping:
 | 
				
			||||||
 | 
					    # - 1 build per group (regardless of how many components)
 | 
				
			||||||
 | 
					    # - Individual components still need all their platform builds
 | 
				
			||||||
 | 
					    individual_test_file_count = sum(
 | 
				
			||||||
 | 
					        len(all_tests[comp]) for comp in individual_tests if comp in all_tests
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    total_grouped_components = sum(len(comps) for _, _, comps in groups_to_test)
 | 
				
			||||||
 | 
					    total_builds_with_grouping = len(groups_to_test) + individual_test_file_count
 | 
				
			||||||
 | 
					    builds_saved = total_test_files - total_builds_with_grouping
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    print(f"\n{'=' * 80}")
 | 
				
			||||||
 | 
					    print(
 | 
				
			||||||
 | 
					        f"Summary: {total_builds_with_grouping} builds total (vs {total_test_files} without grouping)"
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    print(
 | 
				
			||||||
 | 
					        f"  • {len(groups_to_test)} grouped builds ({total_grouped_components} components)"
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    print(
 | 
				
			||||||
 | 
					        f"  • {individual_test_file_count} individual builds ({len(individual_tests)} components)"
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    if total_test_files > 0:
 | 
				
			||||||
 | 
					        reduction_pct = (builds_saved / total_test_files) * 100
 | 
				
			||||||
 | 
					        print(f"  • Saves {builds_saved} builds ({reduction_pct:.1f}% reduction)")
 | 
				
			||||||
 | 
					    print("=" * 80 + "\n")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Execute grouped tests
 | 
				
			||||||
 | 
					    for (platform, signature), components in grouped_components.items():
 | 
				
			||||||
 | 
					        # Only group if we have multiple components with same signature
 | 
				
			||||||
 | 
					        if len(components) <= 1:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Filter out components not in our test list
 | 
				
			||||||
 | 
					        components_to_group = [c for c in components if c in all_tests]
 | 
				
			||||||
 | 
					        if len(components_to_group) <= 1:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Get platform base files
 | 
				
			||||||
 | 
					        if platform not in platform_bases:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        for base_file in platform_bases[platform]:
 | 
				
			||||||
 | 
					            platform_with_version = extract_platform_with_version(base_file)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # Skip if platform filter doesn't match
 | 
				
			||||||
 | 
					            if platform_filter and platform != platform_filter:
 | 
				
			||||||
 | 
					                continue
 | 
				
			||||||
 | 
					            if (
 | 
				
			||||||
 | 
					                platform_filter
 | 
				
			||||||
 | 
					                and platform_with_version != platform_filter
 | 
				
			||||||
 | 
					                and not platform_with_version.startswith(f"{platform_filter}-")
 | 
				
			||||||
 | 
					            ):
 | 
				
			||||||
 | 
					                continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # Run grouped test
 | 
				
			||||||
 | 
					            success, cmd_str = run_grouped_test(
 | 
				
			||||||
 | 
					                components=components_to_group,
 | 
				
			||||||
 | 
					                platform=platform,
 | 
				
			||||||
 | 
					                platform_with_version=platform_with_version,
 | 
				
			||||||
 | 
					                base_file=base_file,
 | 
				
			||||||
 | 
					                build_dir=build_dir,
 | 
				
			||||||
 | 
					                tests_dir=tests_dir,
 | 
				
			||||||
 | 
					                esphome_command=esphome_command,
 | 
				
			||||||
 | 
					                continue_on_fail=continue_on_fail,
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # Mark all components as tested
 | 
				
			||||||
 | 
					            for comp in components_to_group:
 | 
				
			||||||
 | 
					                tested_components.add((comp, platform_with_version))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # Record result for each component - show all components in grouped tests
 | 
				
			||||||
 | 
					            test_id = (
 | 
				
			||||||
 | 
					                f"GROUPED[{','.join(components_to_group)}].{platform_with_version}"
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					            if success:
 | 
				
			||||||
 | 
					                passed_tests.append(test_id)
 | 
				
			||||||
 | 
					            else:
 | 
				
			||||||
 | 
					                failed_tests.append(test_id)
 | 
				
			||||||
 | 
					                failed_commands[test_id] = cmd_str
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return tested_components, passed_tests, failed_tests, failed_commands
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def run_individual_component_test(
 | 
				
			||||||
 | 
					    component: str,
 | 
				
			||||||
 | 
					    test_file: Path,
 | 
				
			||||||
 | 
					    platform: str,
 | 
				
			||||||
 | 
					    platform_with_version: str,
 | 
				
			||||||
 | 
					    base_file: Path,
 | 
				
			||||||
 | 
					    build_dir: Path,
 | 
				
			||||||
 | 
					    esphome_command: str,
 | 
				
			||||||
 | 
					    continue_on_fail: bool,
 | 
				
			||||||
 | 
					    tested_components: set[tuple[str, str]],
 | 
				
			||||||
 | 
					    passed_tests: list[str],
 | 
				
			||||||
 | 
					    failed_tests: list[str],
 | 
				
			||||||
 | 
					    failed_commands: dict[str, str],
 | 
				
			||||||
 | 
					) -> None:
 | 
				
			||||||
 | 
					    """Run an individual component test if not already tested in a group.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        component: Component name
 | 
				
			||||||
 | 
					        test_file: Test file path
 | 
				
			||||||
 | 
					        platform: Platform name
 | 
				
			||||||
 | 
					        platform_with_version: Platform with version
 | 
				
			||||||
 | 
					        base_file: Base file for platform
 | 
				
			||||||
 | 
					        build_dir: Build directory
 | 
				
			||||||
 | 
					        esphome_command: ESPHome command
 | 
				
			||||||
 | 
					        continue_on_fail: Whether to continue on failure
 | 
				
			||||||
 | 
					        tested_components: Set of already tested components
 | 
				
			||||||
 | 
					        passed_tests: List to append passed test IDs
 | 
				
			||||||
 | 
					        failed_tests: List to append failed test IDs
 | 
				
			||||||
 | 
					        failed_commands: Dict to store failed test commands
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    # Skip if already tested in a group
 | 
				
			||||||
 | 
					    if (component, platform_with_version) in tested_components:
 | 
				
			||||||
 | 
					        return
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    test_name = test_file.stem.split(".")[0]
 | 
				
			||||||
 | 
					    success, cmd_str = run_esphome_test(
 | 
				
			||||||
 | 
					        component=component,
 | 
				
			||||||
 | 
					        test_file=test_file,
 | 
				
			||||||
 | 
					        platform=platform,
 | 
				
			||||||
 | 
					        platform_with_version=platform_with_version,
 | 
				
			||||||
 | 
					        base_file=base_file,
 | 
				
			||||||
 | 
					        build_dir=build_dir,
 | 
				
			||||||
 | 
					        esphome_command=esphome_command,
 | 
				
			||||||
 | 
					        continue_on_fail=continue_on_fail,
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    test_id = f"{component}.{test_name}.{platform_with_version}"
 | 
				
			||||||
 | 
					    if success:
 | 
				
			||||||
 | 
					        passed_tests.append(test_id)
 | 
				
			||||||
 | 
					    else:
 | 
				
			||||||
 | 
					        failed_tests.append(test_id)
 | 
				
			||||||
 | 
					        failed_commands[test_id] = cmd_str
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
def test_components(
    component_patterns: list[str],
    platform_filter: str | None,
    esphome_command: str,
    continue_on_fail: bool,
    enable_grouping: bool = True,
) -> int:
    """Test components with optional intelligent grouping.

    Discovers component test files matching the given patterns, optionally
    runs grouped builds first (via ``run_grouped_component_tests``), then
    runs individual builds for anything the grouped pass did not cover,
    and prints a pass/fail summary with reproduction commands.

    Args:
        component_patterns: List of component name patterns
        platform_filter: Optional platform to filter by
        esphome_command: ESPHome command (config/compile)
        continue_on_fail: Whether to continue on failure
        enable_grouping: Whether to enable component grouping

    Returns:
        Exit code (0 for success, 1 for failure)
    """
    # Setup paths. ``parent.parent`` implies this script sits in a direct
    # subdirectory of the repository root.
    repo_root = Path(__file__).parent.parent
    tests_dir = repo_root / "tests" / "components"
    build_components_dir = repo_root / "tests" / "test_build_components"
    build_dir = build_components_dir / "build"
    build_dir.mkdir(parents=True, exist_ok=True)

    # Get platform base files
    platform_bases = get_platform_base_files(build_components_dir)

    # Find all component tests. Later patterns overwrite earlier entries for
    # the same component name (dict.update semantics).
    all_tests = {}
    for pattern in component_patterns:
        all_tests.update(find_component_tests(tests_dir, pattern))

    if not all_tests:
        print(f"No components found matching: {component_patterns}")
        return 1

    print(f"Found {len(all_tests)} components to test")

    # Run tests
    failed_tests = []
    passed_tests = []
    tested_components = set()  # Track which components were tested in groups
    failed_commands = {}  # Track commands for failed tests

    # First, run grouped tests if grouping is enabled.
    # NOTE: this rebinds all four result collections to the objects returned
    # by the grouped run; the individual pass below then appends to those.
    if enable_grouping:
        (
            tested_components,
            passed_tests,
            failed_tests,
            failed_commands,
        ) = run_grouped_component_tests(
            all_tests=all_tests,
            platform_filter=platform_filter,
            platform_bases=platform_bases,
            tests_dir=tests_dir,
            build_dir=build_dir,
            esphome_command=esphome_command,
            continue_on_fail=continue_on_fail,
        )

    # Then run individual tests for components not in groups
    # (run_individual_component_test itself skips pairs already in
    # tested_components).
    for component, test_files in sorted(all_tests.items()):
        for test_file in test_files:
            # test_name is unused here; the individual runner re-derives the
            # name from the file stem.
            test_name, platform = parse_test_filename(test_file)

            # Handle "all" platform tests
            if platform == "all":
                # Run for all platforms.
                # NOTE(review): unlike the platform-specific branch below,
                # this filter requires an exact platform match and does not
                # accept versioned filters like "<platform>-<version>" —
                # confirm this asymmetry is intended.
                for plat, base_files in platform_bases.items():
                    if platform_filter and plat != platform_filter:
                        continue

                    for base_file in base_files:
                        platform_with_version = extract_platform_with_version(base_file)
                        run_individual_component_test(
                            component=component,
                            test_file=test_file,
                            platform=plat,
                            platform_with_version=platform_with_version,
                            base_file=base_file,
                            build_dir=build_dir,
                            esphome_command=esphome_command,
                            continue_on_fail=continue_on_fail,
                            tested_components=tested_components,
                            passed_tests=passed_tests,
                            failed_tests=failed_tests,
                            failed_commands=failed_commands,
                        )
            else:
                # Platform-specific test
                if platform_filter and platform != platform_filter:
                    continue

                if platform not in platform_bases:
                    print(f"No base file for platform: {platform}")
                    continue

                for base_file in platform_bases[platform]:
                    platform_with_version = extract_platform_with_version(base_file)

                    # Skip if requested platform doesn't match: accept an
                    # exact match or a versioned form "<filter>-<version>".
                    if (
                        platform_filter
                        and platform_with_version != platform_filter
                        and not platform_with_version.startswith(f"{platform_filter}-")
                    ):
                        continue

                    run_individual_component_test(
                        component=component,
                        test_file=test_file,
                        platform=platform,
                        platform_with_version=platform_with_version,
                        base_file=base_file,
                        build_dir=build_dir,
                        esphome_command=esphome_command,
                        continue_on_fail=continue_on_fail,
                        tested_components=tested_components,
                        passed_tests=passed_tests,
                        failed_tests=failed_tests,
                        failed_commands=failed_commands,
                    )

    # Print summary
    print("\n" + "=" * 80)
    print(f"Test Summary: {len(passed_tests)} passed, {len(failed_tests)} failed")
    print("=" * 80)

    if failed_tests:
        print("\nFailed tests:")
        for test in failed_tests:
            print(f"  - {test}")

        # Print failed commands at the end for easy copy-paste from CI logs
        print("\n" + "=" * 80)
        print("Failed test commands (copy-paste to reproduce locally):")
        print("=" * 80)
        for test in failed_tests:
            if test in failed_commands:
                print(f"\n# {test}")
                print(failed_commands[test])
        print()

        return 1

    return 0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def main() -> int:
 | 
				
			||||||
 | 
					    """Main entry point."""
 | 
				
			||||||
 | 
					    parser = argparse.ArgumentParser(
 | 
				
			||||||
 | 
					        description="Test ESPHome component builds with intelligent grouping"
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "-e",
 | 
				
			||||||
 | 
					        "--esphome-command",
 | 
				
			||||||
 | 
					        default="compile",
 | 
				
			||||||
 | 
					        choices=["config", "compile", "clean"],
 | 
				
			||||||
 | 
					        help="ESPHome command to run (default: compile)",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "-c",
 | 
				
			||||||
 | 
					        "--components",
 | 
				
			||||||
 | 
					        default="*",
 | 
				
			||||||
 | 
					        help="Component pattern(s) to test (default: *). Comma-separated.",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "-t",
 | 
				
			||||||
 | 
					        "--target",
 | 
				
			||||||
 | 
					        help="Target platform to test (e.g., esp32-idf)",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "-f",
 | 
				
			||||||
 | 
					        "--continue-on-fail",
 | 
				
			||||||
 | 
					        action="store_true",
 | 
				
			||||||
 | 
					        help="Continue testing even if a test fails",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--no-grouping",
 | 
				
			||||||
 | 
					        action="store_true",
 | 
				
			||||||
 | 
					        help="Disable component grouping (test each component individually)",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    args = parser.parse_args()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Parse component patterns
 | 
				
			||||||
 | 
					    component_patterns = [p.strip() for p in args.components.split(",")]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return test_components(
 | 
				
			||||||
 | 
					        component_patterns=component_patterns,
 | 
				
			||||||
 | 
					        platform_filter=args.target,
 | 
				
			||||||
 | 
					        esphome_command=args.esphome_command,
 | 
				
			||||||
 | 
					        continue_on_fail=args.continue_on_fail,
 | 
				
			||||||
 | 
					        enable_grouping=not args.no_grouping,
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					if __name__ == "__main__":
 | 
				
			||||||
 | 
					    sys.exit(main())
 | 
				
			||||||
							
								
								
									
										227
									
								
								script/test_component_grouping.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										227
									
								
								script/test_component_grouping.py
									
									
									
									
									
										Executable file
									
								
							@@ -0,0 +1,227 @@
 | 
				
			|||||||
 | 
					#!/usr/bin/env python3
 | 
				
			||||||
 | 
					"""Test component grouping by finding and testing groups of components.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					This script analyzes components, finds groups that can be tested together,
 | 
				
			||||||
 | 
					and runs test builds for those groups.
 | 
				
			||||||
 | 
					"""
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					from __future__ import annotations
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import argparse
 | 
				
			||||||
 | 
					from pathlib import Path
 | 
				
			||||||
 | 
					import subprocess
 | 
				
			||||||
 | 
					import sys
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# Add esphome to path
 | 
				
			||||||
 | 
					sys.path.insert(0, str(Path(__file__).parent.parent))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					from script.analyze_component_buses import (
 | 
				
			||||||
 | 
					    analyze_all_components,
 | 
				
			||||||
 | 
					    group_components_by_signature,
 | 
				
			||||||
 | 
					)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def test_component_group(
 | 
				
			||||||
 | 
					    components: list[str],
 | 
				
			||||||
 | 
					    platform: str,
 | 
				
			||||||
 | 
					    esphome_command: str = "compile",
 | 
				
			||||||
 | 
					    dry_run: bool = False,
 | 
				
			||||||
 | 
					) -> bool:
 | 
				
			||||||
 | 
					    """Test a group of components together.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Args:
 | 
				
			||||||
 | 
					        components: List of component names to test together
 | 
				
			||||||
 | 
					        platform: Platform to test on (e.g., "esp32-idf")
 | 
				
			||||||
 | 
					        esphome_command: ESPHome command to run (config/compile/clean)
 | 
				
			||||||
 | 
					        dry_run: If True, only print the command without running it
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Returns:
 | 
				
			||||||
 | 
					        True if test passed, False otherwise
 | 
				
			||||||
 | 
					    """
 | 
				
			||||||
 | 
					    components_str = ",".join(components)
 | 
				
			||||||
 | 
					    cmd = [
 | 
				
			||||||
 | 
					        "./script/test_build_components",
 | 
				
			||||||
 | 
					        "-c",
 | 
				
			||||||
 | 
					        components_str,
 | 
				
			||||||
 | 
					        "-t",
 | 
				
			||||||
 | 
					        platform,
 | 
				
			||||||
 | 
					        "-e",
 | 
				
			||||||
 | 
					        esphome_command,
 | 
				
			||||||
 | 
					    ]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    print(f"\n{'=' * 80}")
 | 
				
			||||||
 | 
					    print(f"Testing {len(components)} components on {platform}:")
 | 
				
			||||||
 | 
					    for comp in components:
 | 
				
			||||||
 | 
					        print(f"  - {comp}")
 | 
				
			||||||
 | 
					    print(f"{'=' * 80}")
 | 
				
			||||||
 | 
					    print(f"Command: {' '.join(cmd)}\n")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if dry_run:
 | 
				
			||||||
 | 
					        print("[DRY RUN] Skipping actual test")
 | 
				
			||||||
 | 
					        return True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    try:
 | 
				
			||||||
 | 
					        result = subprocess.run(cmd, check=False)
 | 
				
			||||||
 | 
					        return result.returncode == 0
 | 
				
			||||||
 | 
					    except Exception as e:
 | 
				
			||||||
 | 
					        print(f"Error running test: {e}")
 | 
				
			||||||
 | 
					        return False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def main() -> None:
 | 
				
			||||||
 | 
					    """Main entry point."""
 | 
				
			||||||
 | 
					    parser = argparse.ArgumentParser(
 | 
				
			||||||
 | 
					        description="Test component grouping by finding and testing groups"
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--platform",
 | 
				
			||||||
 | 
					        "-p",
 | 
				
			||||||
 | 
					        default="esp32-idf",
 | 
				
			||||||
 | 
					        help="Platform to test (default: esp32-idf)",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "-e",
 | 
				
			||||||
 | 
					        "--esphome-command",
 | 
				
			||||||
 | 
					        default="compile",
 | 
				
			||||||
 | 
					        choices=["config", "compile", "clean"],
 | 
				
			||||||
 | 
					        help="ESPHome command to run (default: compile)",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--all",
 | 
				
			||||||
 | 
					        action="store_true",
 | 
				
			||||||
 | 
					        help="Test all components (sets --min-size=1, --max-size=10000, --max-groups=10000)",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--min-size",
 | 
				
			||||||
 | 
					        type=int,
 | 
				
			||||||
 | 
					        default=3,
 | 
				
			||||||
 | 
					        help="Minimum group size to test (default: 3)",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--max-size",
 | 
				
			||||||
 | 
					        type=int,
 | 
				
			||||||
 | 
					        default=10,
 | 
				
			||||||
 | 
					        help="Maximum group size to test (default: 10)",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--max-groups",
 | 
				
			||||||
 | 
					        type=int,
 | 
				
			||||||
 | 
					        default=5,
 | 
				
			||||||
 | 
					        help="Maximum number of groups to test (default: 5)",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--signature",
 | 
				
			||||||
 | 
					        "-s",
 | 
				
			||||||
 | 
					        help="Only test groups with this bus signature (e.g., 'spi', 'i2c', 'uart')",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					    parser.add_argument(
 | 
				
			||||||
 | 
					        "--dry-run",
 | 
				
			||||||
 | 
					        action="store_true",
 | 
				
			||||||
 | 
					        help="Print commands without running them",
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    args = parser.parse_args()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # If --all is specified, test all components without grouping
 | 
				
			||||||
 | 
					    if args.all:
 | 
				
			||||||
 | 
					        # Get all components from tests/components directory
 | 
				
			||||||
 | 
					        components_dir = Path("tests/components")
 | 
				
			||||||
 | 
					        all_components = sorted(
 | 
				
			||||||
 | 
					            [d.name for d in components_dir.iterdir() if d.is_dir()]
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if not all_components:
 | 
				
			||||||
 | 
					            print(f"\nNo components found in {components_dir}")
 | 
				
			||||||
 | 
					            return
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        print(f"\nTesting all {len(all_components)} components together")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        success = test_component_group(
 | 
				
			||||||
 | 
					            all_components, args.platform, args.esphome_command, args.dry_run
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Print summary
 | 
				
			||||||
 | 
					        print(f"\n{'=' * 80}")
 | 
				
			||||||
 | 
					        print("TEST SUMMARY")
 | 
				
			||||||
 | 
					        print(f"{'=' * 80}")
 | 
				
			||||||
 | 
					        status = "✅ PASS" if success else "❌ FAIL"
 | 
				
			||||||
 | 
					        print(f"{status} All components: {len(all_components)} components")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if not args.dry_run and not success:
 | 
				
			||||||
 | 
					            sys.exit(1)
 | 
				
			||||||
 | 
					        return
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    print("Analyzing all components...")
 | 
				
			||||||
 | 
					    components, non_groupable, _ = analyze_all_components(Path("tests/components"))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    print(f"Found {len(components)} components, {len(non_groupable)} non-groupable")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Group components by signature for the platform
 | 
				
			||||||
 | 
					    groups = group_components_by_signature(components, args.platform)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Filter and sort groups
 | 
				
			||||||
 | 
					    filtered_groups = []
 | 
				
			||||||
 | 
					    for signature, comp_list in groups.items():
 | 
				
			||||||
 | 
					        # Filter by signature if specified
 | 
				
			||||||
 | 
					        if args.signature and signature != args.signature:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Remove non-groupable components
 | 
				
			||||||
 | 
					        comp_list = [c for c in comp_list if c not in non_groupable]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # Filter by minimum size
 | 
				
			||||||
 | 
					        if len(comp_list) < args.min_size:
 | 
				
			||||||
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # If group is larger than max_size, we'll take a subset later
 | 
				
			||||||
 | 
					        filtered_groups.append((signature, comp_list))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Sort by group size (largest first)
 | 
				
			||||||
 | 
					    filtered_groups.sort(key=lambda x: len(x[1]), reverse=True)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Limit number of groups
 | 
				
			||||||
 | 
					    filtered_groups = filtered_groups[: args.max_groups]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if not filtered_groups:
 | 
				
			||||||
 | 
					        print("\nNo groups found matching criteria:")
 | 
				
			||||||
 | 
					        print(f"  - Platform: {args.platform}")
 | 
				
			||||||
 | 
					        print(f"  - Size: {args.min_size}-{args.max_size}")
 | 
				
			||||||
 | 
					        if args.signature:
 | 
				
			||||||
 | 
					            print(f"  - Signature: {args.signature}")
 | 
				
			||||||
 | 
					        return
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    print(f"\nFound {len(filtered_groups)} groups to test:")
 | 
				
			||||||
 | 
					    for signature, comp_list in filtered_groups:
 | 
				
			||||||
 | 
					        print(f"  [{signature}]: {len(comp_list)} components")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Test each group
 | 
				
			||||||
 | 
					    results = []
 | 
				
			||||||
 | 
					    for signature, comp_list in filtered_groups:
 | 
				
			||||||
 | 
					        # Limit to max_size if group is larger
 | 
				
			||||||
 | 
					        if len(comp_list) > args.max_size:
 | 
				
			||||||
 | 
					            comp_list = comp_list[: args.max_size]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        success = test_component_group(
 | 
				
			||||||
 | 
					            comp_list, args.platform, args.esphome_command, args.dry_run
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					        results.append((signature, comp_list, success))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if not args.dry_run and not success:
 | 
				
			||||||
 | 
					            print(f"\n❌ FAILED: {signature} group")
 | 
				
			||||||
 | 
					            break
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Print summary
 | 
				
			||||||
 | 
					    print(f"\n{'=' * 80}")
 | 
				
			||||||
 | 
					    print("TEST SUMMARY")
 | 
				
			||||||
 | 
					    print(f"{'=' * 80}")
 | 
				
			||||||
 | 
					    for signature, comp_list, success in results:
 | 
				
			||||||
 | 
					        status = "✅ PASS" if success else "❌ FAIL"
 | 
				
			||||||
 | 
					        print(f"{status} [{signature}]: {len(comp_list)} components")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Exit with error if any tests failed
 | 
				
			||||||
 | 
					    if not args.dry_run and any(not success for _, _, success in results):
 | 
				
			||||||
 | 
					        sys.exit(1)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					if __name__ == "__main__":
 | 
				
			||||||
 | 
					    main()
 | 
				
			||||||
@@ -69,7 +69,7 @@ def run_schema_validation(config: ConfigType) -> None:
 | 
				
			|||||||
            {
 | 
					            {
 | 
				
			||||||
                "id": "display_id",
 | 
					                "id": "display_id",
 | 
				
			||||||
                "model": "custom",
 | 
					                "model": "custom",
 | 
				
			||||||
                "dimensions": {"width": 320, "height": 240},
 | 
					                "dimensions": {"width": 260, "height": 260},
 | 
				
			||||||
                "draw_rounding": 13,
 | 
					                "draw_rounding": 13,
 | 
				
			||||||
                "init_sequence": [[0xA0, 0x01]],
 | 
					                "init_sequence": [[0xA0, 0x01]],
 | 
				
			||||||
            },
 | 
					            },
 | 
				
			||||||
@@ -336,7 +336,7 @@ def test_native_generation(
 | 
				
			|||||||
 | 
					
 | 
				
			||||||
    main_cpp = generate_main(component_fixture_path("native.yaml"))
 | 
					    main_cpp = generate_main(component_fixture_path("native.yaml"))
 | 
				
			||||||
    assert (
 | 
					    assert (
 | 
				
			||||||
        "mipi_spi::MipiSpiBuffer<uint16_t, mipi_spi::PIXEL_MODE_16, true, mipi_spi::PIXEL_MODE_16, mipi_spi::BUS_TYPE_QUAD, 360, 360, 0, 1, display::DISPLAY_ROTATION_0_DEGREES, 1>()"
 | 
					        "mipi_spi::MipiSpiBuffer<uint16_t, mipi_spi::PIXEL_MODE_16, true, mipi_spi::PIXEL_MODE_16, mipi_spi::BUS_TYPE_QUAD, 360, 360, 0, 1, display::DISPLAY_ROTATION_0_DEGREES, 1, 1>()"
 | 
				
			||||||
        in main_cpp
 | 
					        in main_cpp
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
    assert "set_init_sequence({240, 1, 8, 242" in main_cpp
 | 
					    assert "set_init_sequence({240, 1, 8, 242" in main_cpp
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,11 +1,4 @@
 | 
				
			|||||||
uart:
 | 
					 | 
				
			||||||
  - id: uart_a01nyub
 | 
					 | 
				
			||||||
    tx_pin: ${tx_pin}
 | 
					 | 
				
			||||||
    rx_pin: ${rx_pin}
 | 
					 | 
				
			||||||
    baud_rate: 9600
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
sensor:
 | 
					sensor:
 | 
				
			||||||
  - platform: a01nyub
 | 
					  - platform: a01nyub
 | 
				
			||||||
    id: a01nyub_sensor
 | 
					    id: a01nyub_sensor
 | 
				
			||||||
    name: a01nyub Distance
 | 
					    name: a01nyub Distance
 | 
				
			||||||
    uart_id: uart_a01nyub
 | 
					 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,5 +0,0 @@
 | 
				
			|||||||
substitutions:
 | 
					 | 
				
			||||||
  tx_pin: GPIO17
 | 
					 | 
				
			||||||
  rx_pin: GPIO16
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
<<: !include common.yaml
 | 
					 | 
				
			||||||
@@ -1,5 +0,0 @@
 | 
				
			|||||||
substitutions:
 | 
					 | 
				
			||||||
  tx_pin: GPIO4
 | 
					 | 
				
			||||||
  rx_pin: GPIO5
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
<<: !include common.yaml
 | 
					 | 
				
			||||||
@@ -1,3 +1,6 @@
 | 
				
			|||||||
 | 
					packages:
 | 
				
			||||||
 | 
					  uart: !include ../../test_build_components/common/uart/esp32-c3-idf.yaml
 | 
				
			||||||
 | 
					
 | 
				
			||||||
substitutions:
 | 
					substitutions:
 | 
				
			||||||
  tx_pin: GPIO4
 | 
					  tx_pin: GPIO4
 | 
				
			||||||
  rx_pin: GPIO5
 | 
					  rx_pin: GPIO5
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,5 +1,8 @@
 | 
				
			|||||||
substitutions:
 | 
					substitutions:
 | 
				
			||||||
  tx_pin: GPIO17
 | 
					  tx_pin: GPIO4
 | 
				
			||||||
  rx_pin: GPIO16
 | 
					  rx_pin: GPIO5
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					packages:
 | 
				
			||||||
 | 
					  uart: !include ../../test_build_components/common/uart/esp32-idf.yaml
 | 
				
			||||||
 | 
					
 | 
				
			||||||
<<: !include common.yaml
 | 
					<<: !include common.yaml
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,5 +1,4 @@
 | 
				
			|||||||
substitutions:
 | 
					packages:
 | 
				
			||||||
  tx_pin: GPIO4
 | 
					  uart: !include ../../test_build_components/common/uart/esp8266-ard.yaml
 | 
				
			||||||
  rx_pin: GPIO5
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
<<: !include common.yaml
 | 
					<<: !include common.yaml
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,5 +1,4 @@
 | 
				
			|||||||
substitutions:
 | 
					packages:
 | 
				
			||||||
  tx_pin: GPIO4
 | 
					  uart: !include ../../test_build_components/common/uart/rp2040-ard.yaml
 | 
				
			||||||
  rx_pin: GPIO5
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
<<: !include common.yaml
 | 
					<<: !include common.yaml
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,11 +1,4 @@
 | 
				
			|||||||
uart:
 | 
					 | 
				
			||||||
  - id: uart_a02yyuw
 | 
					 | 
				
			||||||
    tx_pin: ${tx_pin}
 | 
					 | 
				
			||||||
    rx_pin: ${rx_pin}
 | 
					 | 
				
			||||||
    baud_rate: 9600
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
sensor:
 | 
					sensor:
 | 
				
			||||||
  - platform: a02yyuw
 | 
					  - platform: a02yyuw
 | 
				
			||||||
    id: a02yyuw_sensor
 | 
					    id: a02yyuw_sensor
 | 
				
			||||||
    name: a02yyuw Distance
 | 
					    name: a02yyuw Distance
 | 
				
			||||||
    uart_id: uart_a02yyuw
 | 
					 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,5 +0,0 @@
 | 
				
			|||||||
substitutions:
 | 
					 | 
				
			||||||
  tx_pin: GPIO17
 | 
					 | 
				
			||||||
  rx_pin: GPIO16
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
<<: !include common.yaml
 | 
					 | 
				
			||||||
@@ -1,5 +0,0 @@
 | 
				
			|||||||
substitutions:
 | 
					 | 
				
			||||||
  tx_pin: GPIO4
 | 
					 | 
				
			||||||
  rx_pin: GPIO5
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
<<: !include common.yaml
 | 
					 | 
				
			||||||
@@ -1,3 +1,6 @@
 | 
				
			|||||||
 | 
					packages:
 | 
				
			||||||
 | 
					  uart: !include ../../test_build_components/common/uart/esp32-c3-idf.yaml
 | 
				
			||||||
 | 
					
 | 
				
			||||||
substitutions:
 | 
					substitutions:
 | 
				
			||||||
  tx_pin: GPIO4
 | 
					  tx_pin: GPIO4
 | 
				
			||||||
  rx_pin: GPIO5
 | 
					  rx_pin: GPIO5
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,5 +1,8 @@
 | 
				
			|||||||
substitutions:
 | 
					substitutions:
 | 
				
			||||||
  tx_pin: GPIO17
 | 
					  tx_pin: GPIO4
 | 
				
			||||||
  rx_pin: GPIO16
 | 
					  rx_pin: GPIO5
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					packages:
 | 
				
			||||||
 | 
					  uart: !include ../../test_build_components/common/uart/esp32-idf.yaml
 | 
				
			||||||
 | 
					
 | 
				
			||||||
<<: !include common.yaml
 | 
					<<: !include common.yaml
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,5 +1,4 @@
 | 
				
			|||||||
substitutions:
 | 
					packages:
 | 
				
			||||||
  tx_pin: GPIO4
 | 
					  uart: !include ../../test_build_components/common/uart/esp8266-ard.yaml
 | 
				
			||||||
  rx_pin: GPIO5
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
<<: !include common.yaml
 | 
					<<: !include common.yaml
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,5 +1,4 @@
 | 
				
			|||||||
substitutions:
 | 
					packages:
 | 
				
			||||||
  tx_pin: GPIO4
 | 
					  uart: !include ../../test_build_components/common/uart/rp2040-ard.yaml
 | 
				
			||||||
  rx_pin: GPIO5
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
<<: !include common.yaml
 | 
					<<: !include common.yaml
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,6 +0,0 @@
 | 
				
			|||||||
substitutions:
 | 
					 | 
				
			||||||
  step_pin: GPIO22
 | 
					 | 
				
			||||||
  dir_pin: GPIO23
 | 
					 | 
				
			||||||
  sleep_pin: GPIO25
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
<<: !include common.yaml
 | 
					 | 
				
			||||||
@@ -1,6 +0,0 @@
 | 
				
			|||||||
substitutions:
 | 
					 | 
				
			||||||
  step_pin: GPIO2
 | 
					 | 
				
			||||||
  dir_pin: GPIO3
 | 
					 | 
				
			||||||
  sleep_pin: GPIO5
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
<<: !include common.yaml
 | 
					 | 
				
			||||||
@@ -1,6 +1,6 @@
 | 
				
			|||||||
substitutions:
 | 
					substitutions:
 | 
				
			||||||
  step_pin: GPIO22
 | 
					  step_pin: GPIO22
 | 
				
			||||||
  dir_pin: GPIO23
 | 
					  dir_pin: GPIO4
 | 
				
			||||||
  sleep_pin: GPIO25
 | 
					  sleep_pin: GPIO25
 | 
				
			||||||
 | 
					
 | 
				
			||||||
<<: !include common.yaml
 | 
					<<: !include common.yaml
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,6 +1,6 @@
 | 
				
			|||||||
substitutions:
 | 
					substitutions:
 | 
				
			||||||
  step_pin: GPIO1
 | 
					  step_pin: GPIO1
 | 
				
			||||||
  dir_pin: GPIO2
 | 
					  dir_pin: GPIO2
 | 
				
			||||||
  sleep_pin: GPIO5
 | 
					  sleep_pin: GPIO0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
<<: !include common.yaml
 | 
					<<: !include common.yaml
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,5 +1,5 @@
 | 
				
			|||||||
substitutions:
 | 
					substitutions:
 | 
				
			||||||
  gate_pin: GPIO18
 | 
					  gate_pin: GPIO4
 | 
				
			||||||
  zero_cross_pin: GPIO19
 | 
					  zero_cross_pin: GPIO5
 | 
				
			||||||
 | 
					
 | 
				
			||||||
<<: !include common.yaml
 | 
					<<: !include common.yaml
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,5 +0,0 @@
 | 
				
			|||||||
substitutions:
 | 
					 | 
				
			||||||
  gate_pin: GPIO5
 | 
					 | 
				
			||||||
  zero_cross_pin: GPIO4
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
<<: !include common.yaml
 | 
					 | 
				
			||||||
@@ -1,5 +1,5 @@
 | 
				
			|||||||
substitutions:
 | 
					substitutions:
 | 
				
			||||||
  gate_pin: GPIO5
 | 
					  gate_pin: GPIO0
 | 
				
			||||||
  zero_cross_pin: GPIO4
 | 
					  zero_cross_pin: GPIO2
 | 
				
			||||||
 | 
					
 | 
				
			||||||
<<: !include common.yaml
 | 
					<<: !include common.yaml
 | 
				
			||||||
 
 | 
				
			|||||||
@@ -1,11 +0,0 @@
 | 
				
			|||||||
sensor:
 | 
					 | 
				
			||||||
  - id: my_sensor
 | 
					 | 
				
			||||||
    platform: adc
 | 
					 | 
				
			||||||
    name: ADC Test sensor
 | 
					 | 
				
			||||||
    update_interval: "1:01"
 | 
					 | 
				
			||||||
    attenuation: 2.5db
 | 
					 | 
				
			||||||
    unit_of_measurement: "°C"
 | 
					 | 
				
			||||||
    icon: "mdi:water-percent"
 | 
					 | 
				
			||||||
    accuracy_decimals: 5
 | 
					 | 
				
			||||||
    setup_priority: -100
 | 
					 | 
				
			||||||
    force_update: true
 | 
					 | 
				
			||||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user