Mirror of https://github.com/esphome/esphome.git (synced 2025-10-31 07:03:55 +00:00)

	Merge branch 'reduce_api_size' into integration
		
							
								
								
									
1  .clang-tidy.hash  (new file)
							| @@ -0,0 +1 @@ | ||||
| a3cdfc378d28b53b416a1d5bf0ab9077ee18867f0d39436ea8013cf5a4ead87a | ||||
							
								
								
									
2  .github/actions/restore-python/action.yml  (vendored)
							| @@ -41,7 +41,7 @@ runs: | ||||
|       shell: bash | ||||
|       run: | | ||||
|         python -m venv venv | ||||
|         ./venv/Scripts/activate | ||||
|         source ./venv/Scripts/activate | ||||
|         python --version | ||||
|         pip install -r requirements.txt -r requirements_test.txt | ||||
|         pip install -e . | ||||
|   | ||||
							
								
								
									
76  .github/workflows/ci-clang-tidy-hash.yml  (vendored, new file)
							| @@ -0,0 +1,76 @@ | ||||
| name: Clang-tidy Hash CI | ||||
|  | ||||
| on: | ||||
|   pull_request: | ||||
|     paths: | ||||
|       - ".clang-tidy" | ||||
|       - "platformio.ini" | ||||
|       - "requirements_dev.txt" | ||||
|       - ".clang-tidy.hash" | ||||
|       - "script/clang_tidy_hash.py" | ||||
|       - ".github/workflows/ci-clang-tidy-hash.yml" | ||||
|  | ||||
| permissions: | ||||
|   contents: read | ||||
|   pull-requests: write | ||||
|  | ||||
| jobs: | ||||
|   verify-hash: | ||||
|     name: Verify clang-tidy hash | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - name: Checkout | ||||
|         uses: actions/checkout@v4.2.2 | ||||
|  | ||||
|       - name: Set up Python | ||||
|         uses: actions/setup-python@v5.6.0 | ||||
|         with: | ||||
|           python-version: "3.11" | ||||
|  | ||||
|       - name: Verify hash | ||||
|         run: | | ||||
|           python script/clang_tidy_hash.py --verify | ||||
|  | ||||
|       - if: failure() | ||||
|         name: Show hash details | ||||
|         run: | | ||||
|           python script/clang_tidy_hash.py | ||||
|           echo "## Job Failed" | tee -a $GITHUB_STEP_SUMMARY | ||||
|           echo "You have modified clang-tidy configuration but have not updated the hash." | tee -a $GITHUB_STEP_SUMMARY | ||||
|           echo "Please run 'script/clang_tidy_hash.py --update' and commit the changes." | tee -a $GITHUB_STEP_SUMMARY | ||||
|  | ||||
|       - if: failure() | ||||
|         name: Request changes | ||||
|         uses: actions/github-script@v7.0.1 | ||||
|         with: | ||||
|           script: | | ||||
|             await github.rest.pulls.createReview({ | ||||
|               pull_number: context.issue.number, | ||||
|               owner: context.repo.owner, | ||||
|               repo: context.repo.repo, | ||||
|               event: 'REQUEST_CHANGES', | ||||
|               body: 'You have modified clang-tidy configuration but have not updated the hash.\nPlease run `script/clang_tidy_hash.py --update` and commit the changes.' | ||||
|             }) | ||||
|  | ||||
|       - if: success() | ||||
|         name: Dismiss review | ||||
|         uses: actions/github-script@v7.0.1 | ||||
|         with: | ||||
|           script: | | ||||
|             let reviews = await github.rest.pulls.listReviews({ | ||||
|               pull_number: context.issue.number, | ||||
|               owner: context.repo.owner, | ||||
|               repo: context.repo.repo | ||||
|             }); | ||||
|             for (let review of reviews.data) { | ||||
|               if (review.user.login === 'github-actions[bot]' && review.state === 'CHANGES_REQUESTED') { | ||||
|                 await github.rest.pulls.dismissReview({ | ||||
|                   pull_number: context.issue.number, | ||||
|                   owner: context.repo.owner, | ||||
|                   repo: context.repo.repo, | ||||
|                   review_id: review.id, | ||||
|                   message: 'Clang-tidy hash now matches configuration.' | ||||
|                 }); | ||||
|               } | ||||
|             } | ||||
|  | ||||
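The workflow above relies on script/clang_tidy_hash.py, which is not included in this diff. Below is a minimal sketch, assuming the hash is a SHA-256 over .clang-tidy, platformio.ini and requirements_dev.txt (the same paths that trigger this workflow), stored hex-encoded in .clang-tidy.hash, and following the exit-code conventions the CI steps depend on (--verify fails on mismatch, --check succeeds when the hash has changed). The real script may differ.

    # Illustrative sketch only; the real script/clang_tidy_hash.py is not shown in this diff.
    import hashlib
    import sys
    from pathlib import Path

    INPUTS = [".clang-tidy", "platformio.ini", "requirements_dev.txt"]  # assumed hash inputs
    HASH_FILE = Path(".clang-tidy.hash")

    def compute_hash() -> str:
        digest = hashlib.sha256()
        for name in INPUTS:
            digest.update(Path(name).read_bytes())
        return digest.hexdigest()

    def main() -> int:
        current = compute_hash()
        stored = HASH_FILE.read_text().strip() if HASH_FILE.exists() else ""
        if "--update" in sys.argv:
            HASH_FILE.write_text(current + "\n")
        elif "--update-if-changed" in sys.argv:
            if current != stored:
                HASH_FILE.write_text(current + "\n")
        elif "--verify" in sys.argv:
            # Non-zero exit makes the "Verify hash" step above fail.
            return 0 if current == stored else 1
        elif "--check" in sys.argv:
            # Exit 0 signals "hash changed, run a full clang-tidy scan" in ci.yml.
            return 0 if current != stored else 1
        else:
            print(f"stored:  {stored}")
            print(f"current: {current}")
        return 0

    if __name__ == "__main__":
        sys.exit(main())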
							
								
								
									
192  .github/workflows/ci.yml  (vendored)
							| @@ -66,6 +66,8 @@ jobs: | ||||
|     runs-on: ubuntu-24.04 | ||||
|     needs: | ||||
|       - common | ||||
|       - determine-jobs | ||||
|     if: needs.determine-jobs.outputs.python-linters == 'true' | ||||
|     steps: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@v4.2.2 | ||||
| @@ -87,6 +89,8 @@ jobs: | ||||
|     runs-on: ubuntu-24.04 | ||||
|     needs: | ||||
|       - common | ||||
|       - determine-jobs | ||||
|     if: needs.determine-jobs.outputs.python-linters == 'true' | ||||
|     steps: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@v4.2.2 | ||||
| @@ -108,6 +112,8 @@ jobs: | ||||
|     runs-on: ubuntu-24.04 | ||||
|     needs: | ||||
|       - common | ||||
|       - determine-jobs | ||||
|     if: needs.determine-jobs.outputs.python-linters == 'true' | ||||
|     steps: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@v4.2.2 | ||||
| @@ -129,6 +135,8 @@ jobs: | ||||
|     runs-on: ubuntu-24.04 | ||||
|     needs: | ||||
|       - common | ||||
|       - determine-jobs | ||||
|     if: needs.determine-jobs.outputs.python-linters == 'true' | ||||
|     steps: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@v4.2.2 | ||||
| @@ -204,6 +212,7 @@ jobs: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@v4.2.2 | ||||
|       - name: Restore Python | ||||
|         id: restore-python | ||||
|         uses: ./.github/actions/restore-python | ||||
|         with: | ||||
|           python-version: ${{ matrix.python-version }} | ||||
| @@ -213,23 +222,108 @@ jobs: | ||||
|       - name: Run pytest | ||||
|         if: matrix.os == 'windows-latest' | ||||
|         run: | | ||||
|           ./venv/Scripts/activate | ||||
|           pytest -vv --cov-report=xml --tb=native -n auto tests | ||||
|           . ./venv/Scripts/activate.ps1 | ||||
|           pytest -vv --cov-report=xml --tb=native -n auto tests --ignore=tests/integration/ | ||||
|       - name: Run pytest | ||||
|         if: matrix.os == 'ubuntu-latest' || matrix.os == 'macOS-latest' | ||||
|         run: | | ||||
|           . venv/bin/activate | ||||
|           pytest -vv --cov-report=xml --tb=native -n auto tests | ||||
|           pytest -vv --cov-report=xml --tb=native -n auto tests --ignore=tests/integration/ | ||||
|       - name: Upload coverage to Codecov | ||||
|         uses: codecov/codecov-action@v5.4.3 | ||||
|         with: | ||||
|           token: ${{ secrets.CODECOV_TOKEN }} | ||||
|       - name: Save Python virtual environment cache | ||||
|         if: github.ref == 'refs/heads/dev' | ||||
|         uses: actions/cache/save@v4.2.3 | ||||
|         with: | ||||
|           path: venv | ||||
|           key: ${{ runner.os }}-${{ steps.restore-python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }} | ||||
|  | ||||
|   determine-jobs: | ||||
|     name: Determine which jobs to run | ||||
|     runs-on: ubuntu-24.04 | ||||
|     needs: | ||||
|       - common | ||||
|     outputs: | ||||
|       integration-tests: ${{ steps.determine.outputs.integration-tests }} | ||||
|       clang-tidy: ${{ steps.determine.outputs.clang-tidy }} | ||||
|       clang-format: ${{ steps.determine.outputs.clang-format }} | ||||
|       python-linters: ${{ steps.determine.outputs.python-linters }} | ||||
|       changed-components: ${{ steps.determine.outputs.changed-components }} | ||||
|       component-test-count: ${{ steps.determine.outputs.component-test-count }} | ||||
|     steps: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@v4.2.2 | ||||
|         with: | ||||
|           # Fetch enough history to find the merge base | ||||
|           fetch-depth: 2 | ||||
|       - name: Restore Python | ||||
|         uses: ./.github/actions/restore-python | ||||
|         with: | ||||
|           python-version: ${{ env.DEFAULT_PYTHON }} | ||||
|           cache-key: ${{ needs.common.outputs.cache-key }} | ||||
|       - name: Determine which tests to run | ||||
|         id: determine | ||||
|         env: | ||||
|           GH_TOKEN: ${{ github.token }} | ||||
|         run: | | ||||
|           . venv/bin/activate | ||||
|           output=$(python script/determine-jobs.py) | ||||
|           echo "Test determination output:" | ||||
|           echo "$output" | jq | ||||
|  | ||||
|           # Extract individual fields | ||||
|           echo "integration-tests=$(echo "$output" | jq -r '.integration_tests')" >> $GITHUB_OUTPUT | ||||
|           echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT | ||||
|           echo "clang-format=$(echo "$output" | jq -r '.clang_format')" >> $GITHUB_OUTPUT | ||||
|           echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT | ||||
|           echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT | ||||
|           echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT | ||||
|  | ||||
|   integration-tests: | ||||
|     name: Run integration tests | ||||
|     runs-on: ubuntu-latest | ||||
|     needs: | ||||
|       - common | ||||
|       - determine-jobs | ||||
|     if: needs.determine-jobs.outputs.integration-tests == 'true' | ||||
|     steps: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@v4.2.2 | ||||
|       - name: Set up Python 3.13 | ||||
|         id: python | ||||
|         uses: actions/setup-python@v5.6.0 | ||||
|         with: | ||||
|           python-version: "3.13" | ||||
|       - name: Restore Python virtual environment | ||||
|         id: cache-venv | ||||
|         uses: actions/cache@v4.2.3 | ||||
|         with: | ||||
|           path: venv | ||||
|           key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }} | ||||
|       - name: Create Python virtual environment | ||||
|         if: steps.cache-venv.outputs.cache-hit != 'true' | ||||
|         run: | | ||||
|           python -m venv venv | ||||
|           . venv/bin/activate | ||||
|           python --version | ||||
|           pip install -r requirements.txt -r requirements_test.txt | ||||
|           pip install -e . | ||||
|       - name: Register matcher | ||||
|         run: echo "::add-matcher::.github/workflows/matchers/pytest.json" | ||||
|       - name: Run integration tests | ||||
|         run: | | ||||
|           . venv/bin/activate | ||||
|           pytest -vv --no-cov --tb=native -n auto tests/integration/ | ||||
|  | ||||
|   clang-format: | ||||
|     name: Check clang-format | ||||
|     runs-on: ubuntu-24.04 | ||||
|     needs: | ||||
|       - common | ||||
|       - determine-jobs | ||||
|     if: needs.determine-jobs.outputs.clang-format == 'true' | ||||
|     steps: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@v4.2.2 | ||||
| @@ -263,6 +357,10 @@ jobs: | ||||
|       - pylint | ||||
|       - pytest | ||||
|       - pyupgrade | ||||
|       - determine-jobs | ||||
|     if: needs.determine-jobs.outputs.clang-tidy == 'true' | ||||
|     env: | ||||
|       GH_TOKEN: ${{ github.token }} | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       max-parallel: 2 | ||||
| @@ -301,6 +399,10 @@ jobs: | ||||
|     steps: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@v4.2.2 | ||||
|         with: | ||||
|           # Need history for HEAD~1 to work for checking changed files | ||||
|           fetch-depth: 2 | ||||
|  | ||||
|       - name: Restore Python | ||||
|         uses: ./.github/actions/restore-python | ||||
|         with: | ||||
| @@ -312,14 +414,14 @@ jobs: | ||||
|         uses: actions/cache@v4.2.3 | ||||
|         with: | ||||
|           path: ~/.platformio | ||||
|           key: platformio-${{ matrix.pio_cache_key }} | ||||
|           key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }} | ||||
|  | ||||
|       - name: Cache platformio | ||||
|         if: github.ref != 'refs/heads/dev' | ||||
|         uses: actions/cache/restore@v4.2.3 | ||||
|         with: | ||||
|           path: ~/.platformio | ||||
|           key: platformio-${{ matrix.pio_cache_key }} | ||||
|           key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }} | ||||
|  | ||||
|       - name: Register problem matchers | ||||
|         run: | | ||||
| @@ -333,10 +435,28 @@ jobs: | ||||
|           mkdir -p .temp | ||||
|           pio run --list-targets -e esp32-idf-tidy | ||||
|  | ||||
|       - name: Check if full clang-tidy scan needed | ||||
|         id: check_full_scan | ||||
|         run: | | ||||
|           . venv/bin/activate | ||||
|           if python script/clang_tidy_hash.py --check; then | ||||
|             echo "full_scan=true" >> $GITHUB_OUTPUT | ||||
|             echo "reason=hash_changed" >> $GITHUB_OUTPUT | ||||
|           else | ||||
|             echo "full_scan=false" >> $GITHUB_OUTPUT | ||||
|             echo "reason=normal" >> $GITHUB_OUTPUT | ||||
|           fi | ||||
|  | ||||
|       - name: Run clang-tidy | ||||
|         run: | | ||||
|           . venv/bin/activate | ||||
|           script/clang-tidy --all-headers --fix ${{ matrix.options }} ${{ matrix.ignore_errors && '|| true' || '' }} | ||||
|           if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then | ||||
|             echo "Running FULL clang-tidy scan (hash changed)" | ||||
|             script/clang-tidy --all-headers --fix ${{ matrix.options }} ${{ matrix.ignore_errors && '|| true' || '' }} | ||||
|           else | ||||
|             echo "Running clang-tidy on changed files only" | ||||
|             script/clang-tidy --all-headers --fix --changed ${{ matrix.options }} ${{ matrix.ignore_errors && '|| true' || '' }} | ||||
|           fi | ||||
|         env: | ||||
|           # Also cache libdeps, store them in a ~/.platformio subfolder | ||||
|           PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps | ||||
| @@ -346,59 +466,18 @@ jobs: | ||||
|         # yamllint disable-line rule:line-length | ||||
|         if: always() | ||||
|  | ||||
|   list-components: | ||||
|     runs-on: ubuntu-24.04 | ||||
|     needs: | ||||
|       - common | ||||
|     if: github.event_name == 'pull_request' | ||||
|     outputs: | ||||
|       components: ${{ steps.list-components.outputs.components }} | ||||
|       count: ${{ steps.list-components.outputs.count }} | ||||
|     steps: | ||||
|       - name: Check out code from GitHub | ||||
|         uses: actions/checkout@v4.2.2 | ||||
|         with: | ||||
|           # Fetch enough history so `git merge-base refs/remotes/origin/dev HEAD` works. | ||||
|           fetch-depth: 500 | ||||
|       - name: Get target branch | ||||
|         id: target-branch | ||||
|         run: | | ||||
|           echo "branch=${{ github.event.pull_request.base.ref }}" >> $GITHUB_OUTPUT | ||||
|       - name: Fetch ${{ steps.target-branch.outputs.branch }} branch | ||||
|         run: | | ||||
|           git -c protocol.version=2 fetch --no-tags --prune --no-recurse-submodules --depth=1 origin +refs/heads/${{ steps.target-branch.outputs.branch }}:refs/remotes/origin/${{ steps.target-branch.outputs.branch }} | ||||
|           git merge-base refs/remotes/origin/${{ steps.target-branch.outputs.branch }} HEAD | ||||
|       - name: Restore Python | ||||
|         uses: ./.github/actions/restore-python | ||||
|         with: | ||||
|           python-version: ${{ env.DEFAULT_PYTHON }} | ||||
|           cache-key: ${{ needs.common.outputs.cache-key }} | ||||
|       - name: Find changed components | ||||
|         id: list-components | ||||
|         run: | | ||||
|           . venv/bin/activate | ||||
|           components=$(script/list-components.py --changed --branch ${{ steps.target-branch.outputs.branch }}) | ||||
|           output_components=$(echo "$components" | jq -R -s -c 'split("\n")[:-1] | map(select(length > 0))') | ||||
|           count=$(echo "$output_components" | jq length) | ||||
|  | ||||
|           echo "components=$output_components" >> $GITHUB_OUTPUT | ||||
|           echo "count=$count" >> $GITHUB_OUTPUT | ||||
|  | ||||
|           echo "$count Components:" | ||||
|           echo "$output_components" | jq | ||||
|  | ||||
|   test-build-components: | ||||
|     name: Component test ${{ matrix.file }} | ||||
|     runs-on: ubuntu-24.04 | ||||
|     needs: | ||||
|       - common | ||||
|       - list-components | ||||
|     if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) > 0 && fromJSON(needs.list-components.outputs.count) < 100 | ||||
|       - determine-jobs | ||||
|     if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0 && fromJSON(needs.determine-jobs.outputs.component-test-count) < 100 | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       max-parallel: 2 | ||||
|       matrix: | ||||
|         file: ${{ fromJson(needs.list-components.outputs.components) }} | ||||
|         file: ${{ fromJson(needs.determine-jobs.outputs.changed-components) }} | ||||
|     steps: | ||||
|       - name: Install dependencies | ||||
|         run: | | ||||
| @@ -426,8 +505,8 @@ jobs: | ||||
|     runs-on: ubuntu-24.04 | ||||
|     needs: | ||||
|       - common | ||||
|       - list-components | ||||
|     if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) >= 100 | ||||
|       - determine-jobs | ||||
|     if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100 | ||||
|     outputs: | ||||
|       matrix: ${{ steps.split.outputs.components }} | ||||
|     steps: | ||||
| @@ -436,7 +515,7 @@ jobs: | ||||
|       - name: Split components into 20 groups | ||||
|         id: split | ||||
|         run: | | ||||
|           components=$(echo '${{ needs.list-components.outputs.components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(20) | join(" ")]') | ||||
|           components=$(echo '${{ needs.determine-jobs.outputs.changed-components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(20) | join(" ")]') | ||||
|           echo "components=$components" >> $GITHUB_OUTPUT | ||||
|  | ||||
|   test-build-components-split: | ||||
| @@ -444,9 +523,9 @@ jobs: | ||||
|     runs-on: ubuntu-24.04 | ||||
|     needs: | ||||
|       - common | ||||
|       - list-components | ||||
|       - determine-jobs | ||||
|       - test-build-components-splitter | ||||
|     if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) >= 100 | ||||
|     if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100 | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       max-parallel: 4 | ||||
| @@ -494,9 +573,10 @@ jobs: | ||||
|       - flake8 | ||||
|       - pylint | ||||
|       - pytest | ||||
|       - integration-tests | ||||
|       - pyupgrade | ||||
|       - clang-tidy | ||||
|       - list-components | ||||
|       - determine-jobs | ||||
|       - test-build-components | ||||
|       - test-build-components-splitter | ||||
|       - test-build-components-split | ||||
|   | ||||
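The new determine-jobs job parses JSON emitted by script/determine-jobs.py, which is also not part of this diff. Judging from the jq extraction in that step, the payload plausibly has the shape sketched below; the key names come from the workflow, while the concrete values are invented for illustration.

    # Shape of the determine-jobs.py output as implied by the jq calls in ci.yml above.
    # Key names are taken from the workflow; the values here are made up.
    import json

    payload = {
        "integration_tests": True,       # mapped to the integration-tests output
        "clang_tidy": True,              # mapped to clang-tidy
        "clang_format": False,           # mapped to clang-format
        "python_linters": True,          # mapped to python-linters
        "changed_components": ["api", "airthings_wave_plus"],  # kept as JSON via `jq -c`
        "component_test_count": 2,       # mapped to component-test-count
    }

    print(json.dumps(payload))

With this shape, `jq -r '.integration_tests'` prints the literal string "true", which is exactly what the `needs.determine-jobs.outputs.integration-tests == 'true'` conditions compare against.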
| @@ -48,3 +48,10 @@ repos: | ||||
|         entry: python3 script/run-in-env.py pylint | ||||
|         language: system | ||||
|         types: [python] | ||||
|       - id: clang-tidy-hash | ||||
|         name: Update clang-tidy hash | ||||
|         entry: python script/clang_tidy_hash.py --update-if-changed | ||||
|         language: python | ||||
|         files: ^(\.clang-tidy|platformio\.ini|requirements_dev\.txt)$ | ||||
|         pass_filenames: false | ||||
|         additional_dependencies: [] | ||||
|   | ||||
| @@ -28,7 +28,7 @@ esphome/components/aic3204/* @kbx81 | ||||
| esphome/components/airthings_ble/* @jeromelaban | ||||
| esphome/components/airthings_wave_base/* @jeromelaban @kpfleming @ncareau | ||||
| esphome/components/airthings_wave_mini/* @ncareau | ||||
| esphome/components/airthings_wave_plus/* @jeromelaban | ||||
| esphome/components/airthings_wave_plus/* @jeromelaban @precurse | ||||
| esphome/components/alarm_control_panel/* @grahambrown11 @hwstar | ||||
| esphome/components/alpha3/* @jan-hofmeier | ||||
| esphome/components/am2315c/* @swoboda1337 | ||||
| @@ -170,6 +170,7 @@ esphome/components/ft5x06/* @clydebarrow | ||||
| esphome/components/ft63x6/* @gpambrozio | ||||
| esphome/components/gcja5/* @gcormier | ||||
| esphome/components/gdk101/* @Szewcson | ||||
| esphome/components/gl_r01_i2c/* @pkejval | ||||
| esphome/components/globals/* @esphome/core | ||||
| esphome/components/gp2y1010au0f/* @zry98 | ||||
| esphome/components/gp8403/* @jesserockz | ||||
| @@ -254,6 +255,7 @@ esphome/components/ln882x/* @lamauny | ||||
| esphome/components/lock/* @esphome/core | ||||
| esphome/components/logger/* @esphome/core | ||||
| esphome/components/logger/select/* @clydebarrow | ||||
| esphome/components/lps22/* @nagisa | ||||
| esphome/components/ltr390/* @latonita @sjtrny | ||||
| esphome/components/ltr501/* @latonita | ||||
| esphome/components/ltr_als_ps/* @latonita | ||||
|   | ||||
							
								
								
									
2  Doxyfile
							| @@ -48,7 +48,7 @@ PROJECT_NAME           = ESPHome | ||||
| # could be handy for archiving the generated documentation or if some version | ||||
| # control system is used. | ||||
|  | ||||
| PROJECT_NUMBER         = 2025.7.0-dev | ||||
| PROJECT_NUMBER         = 2025.8.0-dev | ||||
|  | ||||
| # Using the PROJECT_BRIEF tag one can provide an optional one line description | ||||
| # for a project that appears at the top of each page and should give viewer a | ||||
|   | ||||
| @@ -1 +1 @@ | ||||
| CODEOWNERS = ["@jeromelaban"] | ||||
| CODEOWNERS = ["@jeromelaban", "@precurse"] | ||||
|   | ||||
| @@ -73,11 +73,29 @@ void AirthingsWavePlus::dump_config() { | ||||
|   LOG_SENSOR("  ", "Illuminance", this->illuminance_sensor_); | ||||
| } | ||||
|  | ||||
| AirthingsWavePlus::AirthingsWavePlus() { | ||||
|   this->service_uuid_ = espbt::ESPBTUUID::from_raw(SERVICE_UUID); | ||||
|   this->sensors_data_characteristic_uuid_ = espbt::ESPBTUUID::from_raw(CHARACTERISTIC_UUID); | ||||
| void AirthingsWavePlus::setup() { | ||||
|   const char *service_uuid; | ||||
|   const char *characteristic_uuid; | ||||
|   const char *access_control_point_characteristic_uuid; | ||||
|  | ||||
|   // Change UUIDs for Wave Radon Gen2 | ||||
|   switch (this->wave_device_type_) { | ||||
|     case WaveDeviceType::WAVE_GEN2: | ||||
|       service_uuid = SERVICE_UUID_WAVE_RADON_GEN2; | ||||
|       characteristic_uuid = CHARACTERISTIC_UUID_WAVE_RADON_GEN2; | ||||
|       access_control_point_characteristic_uuid = ACCESS_CONTROL_POINT_CHARACTERISTIC_UUID_WAVE_RADON_GEN2; | ||||
|       break; | ||||
|     default: | ||||
|       // Wave Plus | ||||
|       service_uuid = SERVICE_UUID; | ||||
|       characteristic_uuid = CHARACTERISTIC_UUID; | ||||
|       access_control_point_characteristic_uuid = ACCESS_CONTROL_POINT_CHARACTERISTIC_UUID; | ||||
|   } | ||||
|  | ||||
|   this->service_uuid_ = espbt::ESPBTUUID::from_raw(service_uuid); | ||||
|   this->sensors_data_characteristic_uuid_ = espbt::ESPBTUUID::from_raw(characteristic_uuid); | ||||
|   this->access_control_point_characteristic_uuid_ = | ||||
|       espbt::ESPBTUUID::from_raw(ACCESS_CONTROL_POINT_CHARACTERISTIC_UUID); | ||||
|       espbt::ESPBTUUID::from_raw(access_control_point_characteristic_uuid); | ||||
| } | ||||
|  | ||||
| }  // namespace airthings_wave_plus | ||||
|   | ||||
| @@ -9,13 +9,20 @@ namespace airthings_wave_plus { | ||||
|  | ||||
| namespace espbt = esphome::esp32_ble_tracker; | ||||
|  | ||||
| enum WaveDeviceType : uint8_t { WAVE_PLUS = 0, WAVE_GEN2 = 1 }; | ||||
|  | ||||
| static const char *const SERVICE_UUID = "b42e1c08-ade7-11e4-89d3-123b93f75cba"; | ||||
| static const char *const CHARACTERISTIC_UUID = "b42e2a68-ade7-11e4-89d3-123b93f75cba"; | ||||
| static const char *const ACCESS_CONTROL_POINT_CHARACTERISTIC_UUID = "b42e2d06-ade7-11e4-89d3-123b93f75cba"; | ||||
|  | ||||
| static const char *const SERVICE_UUID_WAVE_RADON_GEN2 = "b42e4a8e-ade7-11e4-89d3-123b93f75cba"; | ||||
| static const char *const CHARACTERISTIC_UUID_WAVE_RADON_GEN2 = "b42e4dcc-ade7-11e4-89d3-123b93f75cba"; | ||||
| static const char *const ACCESS_CONTROL_POINT_CHARACTERISTIC_UUID_WAVE_RADON_GEN2 = | ||||
|     "b42e50d8-ade7-11e4-89d3-123b93f75cba"; | ||||
|  | ||||
| class AirthingsWavePlus : public airthings_wave_base::AirthingsWaveBase { | ||||
|  public: | ||||
|   AirthingsWavePlus(); | ||||
|   void setup() override; | ||||
|  | ||||
|   void dump_config() override; | ||||
|  | ||||
| @@ -23,12 +30,14 @@ class AirthingsWavePlus : public airthings_wave_base::AirthingsWaveBase { | ||||
|   void set_radon_long_term(sensor::Sensor *radon_long_term) { radon_long_term_sensor_ = radon_long_term; } | ||||
|   void set_co2(sensor::Sensor *co2) { co2_sensor_ = co2; } | ||||
|   void set_illuminance(sensor::Sensor *illuminance) { illuminance_sensor_ = illuminance; } | ||||
|   void set_device_type(WaveDeviceType wave_device_type) { wave_device_type_ = wave_device_type; } | ||||
|  | ||||
|  protected: | ||||
|   bool is_valid_radon_value_(uint16_t radon); | ||||
|   bool is_valid_co2_value_(uint16_t co2); | ||||
|  | ||||
|   void read_sensors(uint8_t *raw_value, uint16_t value_len) override; | ||||
|   WaveDeviceType wave_device_type_{WaveDeviceType::WAVE_PLUS}; | ||||
|  | ||||
|   sensor::Sensor *radon_sensor_{nullptr}; | ||||
|   sensor::Sensor *radon_long_term_sensor_{nullptr}; | ||||
|   | ||||
| @@ -7,6 +7,7 @@ from esphome.const import ( | ||||
|     CONF_ILLUMINANCE, | ||||
|     CONF_RADON, | ||||
|     CONF_RADON_LONG_TERM, | ||||
|     CONF_TVOC, | ||||
|     DEVICE_CLASS_CARBON_DIOXIDE, | ||||
|     DEVICE_CLASS_ILLUMINANCE, | ||||
|     ICON_RADIOACTIVE, | ||||
| @@ -15,6 +16,7 @@ from esphome.const import ( | ||||
|     UNIT_LUX, | ||||
|     UNIT_PARTS_PER_MILLION, | ||||
| ) | ||||
| from esphome.types import ConfigType | ||||
|  | ||||
| DEPENDENCIES = airthings_wave_base.DEPENDENCIES | ||||
|  | ||||
| @@ -25,35 +27,59 @@ AirthingsWavePlus = airthings_wave_plus_ns.class_( | ||||
|     "AirthingsWavePlus", airthings_wave_base.AirthingsWaveBase | ||||
| ) | ||||
|  | ||||
| CONF_DEVICE_TYPE = "device_type" | ||||
| WaveDeviceType = airthings_wave_plus_ns.enum("WaveDeviceType") | ||||
| DEVICE_TYPES = { | ||||
|     "WAVE_PLUS": WaveDeviceType.WAVE_PLUS, | ||||
|     "WAVE_GEN2": WaveDeviceType.WAVE_GEN2, | ||||
| } | ||||
|  | ||||
| CONFIG_SCHEMA = airthings_wave_base.BASE_SCHEMA.extend( | ||||
|     { | ||||
|         cv.GenerateID(): cv.declare_id(AirthingsWavePlus), | ||||
|         cv.Optional(CONF_RADON): sensor.sensor_schema( | ||||
|             unit_of_measurement=UNIT_BECQUEREL_PER_CUBIC_METER, | ||||
|             icon=ICON_RADIOACTIVE, | ||||
|             accuracy_decimals=0, | ||||
|             state_class=STATE_CLASS_MEASUREMENT, | ||||
|         ), | ||||
|         cv.Optional(CONF_RADON_LONG_TERM): sensor.sensor_schema( | ||||
|             unit_of_measurement=UNIT_BECQUEREL_PER_CUBIC_METER, | ||||
|             icon=ICON_RADIOACTIVE, | ||||
|             accuracy_decimals=0, | ||||
|             state_class=STATE_CLASS_MEASUREMENT, | ||||
|         ), | ||||
|         cv.Optional(CONF_CO2): sensor.sensor_schema( | ||||
|             unit_of_measurement=UNIT_PARTS_PER_MILLION, | ||||
|             accuracy_decimals=0, | ||||
|             device_class=DEVICE_CLASS_CARBON_DIOXIDE, | ||||
|             state_class=STATE_CLASS_MEASUREMENT, | ||||
|         ), | ||||
|         cv.Optional(CONF_ILLUMINANCE): sensor.sensor_schema( | ||||
|             unit_of_measurement=UNIT_LUX, | ||||
|             accuracy_decimals=0, | ||||
|             device_class=DEVICE_CLASS_ILLUMINANCE, | ||||
|             state_class=STATE_CLASS_MEASUREMENT, | ||||
|         ), | ||||
|     } | ||||
|  | ||||
| def validate_wave_gen2_config(config: ConfigType) -> ConfigType: | ||||
|     """Validate that Wave Gen2 devices don't have CO2 or TVOC sensors.""" | ||||
|     if config[CONF_DEVICE_TYPE] == "WAVE_GEN2": | ||||
|         if CONF_CO2 in config: | ||||
|             raise cv.Invalid("Wave Gen2 devices do not support CO2 sensor") | ||||
|         # Check for TVOC in the base schema config | ||||
|         if CONF_TVOC in config: | ||||
|             raise cv.Invalid("Wave Gen2 devices do not support TVOC sensor") | ||||
|     return config | ||||
|  | ||||
|  | ||||
| CONFIG_SCHEMA = cv.All( | ||||
|     airthings_wave_base.BASE_SCHEMA.extend( | ||||
|         { | ||||
|             cv.GenerateID(): cv.declare_id(AirthingsWavePlus), | ||||
|             cv.Optional(CONF_RADON): sensor.sensor_schema( | ||||
|                 unit_of_measurement=UNIT_BECQUEREL_PER_CUBIC_METER, | ||||
|                 icon=ICON_RADIOACTIVE, | ||||
|                 accuracy_decimals=0, | ||||
|                 state_class=STATE_CLASS_MEASUREMENT, | ||||
|             ), | ||||
|             cv.Optional(CONF_RADON_LONG_TERM): sensor.sensor_schema( | ||||
|                 unit_of_measurement=UNIT_BECQUEREL_PER_CUBIC_METER, | ||||
|                 icon=ICON_RADIOACTIVE, | ||||
|                 accuracy_decimals=0, | ||||
|                 state_class=STATE_CLASS_MEASUREMENT, | ||||
|             ), | ||||
|             cv.Optional(CONF_CO2): sensor.sensor_schema( | ||||
|                 unit_of_measurement=UNIT_PARTS_PER_MILLION, | ||||
|                 accuracy_decimals=0, | ||||
|                 device_class=DEVICE_CLASS_CARBON_DIOXIDE, | ||||
|                 state_class=STATE_CLASS_MEASUREMENT, | ||||
|             ), | ||||
|             cv.Optional(CONF_ILLUMINANCE): sensor.sensor_schema( | ||||
|                 unit_of_measurement=UNIT_LUX, | ||||
|                 accuracy_decimals=0, | ||||
|                 device_class=DEVICE_CLASS_ILLUMINANCE, | ||||
|                 state_class=STATE_CLASS_MEASUREMENT, | ||||
|             ), | ||||
|             cv.Optional(CONF_DEVICE_TYPE, default="WAVE_PLUS"): cv.enum( | ||||
|                 DEVICE_TYPES, upper=True | ||||
|             ), | ||||
|         } | ||||
|     ), | ||||
|     validate_wave_gen2_config, | ||||
| ) | ||||
|  | ||||
|  | ||||
| @@ -73,3 +99,4 @@ async def to_code(config): | ||||
|     if config_illuminance := config.get(CONF_ILLUMINANCE): | ||||
|         sens = await sensor.new_sensor(config_illuminance) | ||||
|         cg.add(var.set_illuminance(sens)) | ||||
|     cg.add(var.set_device_type(config[CONF_DEVICE_TYPE])) | ||||
|   | ||||
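The validate_wave_gen2_config helper added above rejects CO2 and TVOC sensors when device_type is WAVE_GEN2. Here is a self-contained illustration of that branching, using plain dicts and a stand-in Invalid exception instead of the esphome.config_validation module.

    # Standalone illustration of the validation rule; the CONF_* values and Invalid
    # are stand-ins for their esphome.config_validation counterparts.
    CONF_DEVICE_TYPE = "device_type"
    CONF_CO2 = "co2"
    CONF_TVOC = "tvoc"

    class Invalid(Exception):
        pass

    def validate_wave_gen2_config(config: dict) -> dict:
        if config[CONF_DEVICE_TYPE] == "WAVE_GEN2":
            if CONF_CO2 in config:
                raise Invalid("Wave Gen2 devices do not support CO2 sensor")
            if CONF_TVOC in config:
                raise Invalid("Wave Gen2 devices do not support TVOC sensor")
        return config

    validate_wave_gen2_config({CONF_DEVICE_TYPE: "WAVE_PLUS", CONF_CO2: {}})  # accepted
    try:
        validate_wave_gen2_config({CONF_DEVICE_TYPE: "WAVE_GEN2", CONF_CO2: {}})
    except Invalid as err:
        print(err)  # "Wave Gen2 devices do not support CO2 sensor"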
| @@ -23,7 +23,7 @@ void APDS9960::setup() { | ||||
|     return; | ||||
|   } | ||||
|  | ||||
|   if (id != 0xAB && id != 0x9C && id != 0xA8) {  // APDS9960 all should have one of these IDs | ||||
|   if (id != 0xAB && id != 0x9C && id != 0xA8 && id != 0x9E) {  // APDS9960 all should have one of these IDs | ||||
|     this->error_code_ = WRONG_ID; | ||||
|     this->mark_failed(); | ||||
|     return; | ||||
|   | ||||
| @@ -374,6 +374,7 @@ message CoverCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_COVER"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|  | ||||
| @@ -387,6 +388,7 @@ message CoverCommandRequest { | ||||
|   bool has_tilt = 6; | ||||
|   float tilt = 7; | ||||
|   bool stop = 8; | ||||
|   uint32 device_id = 9; | ||||
| } | ||||
|  | ||||
| // ==================== FAN ==================== | ||||
| @@ -441,6 +443,7 @@ message FanCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_FAN"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   bool has_state = 2; | ||||
| @@ -455,6 +458,7 @@ message FanCommandRequest { | ||||
|   int32 speed_level = 11; | ||||
|   bool has_preset_mode = 12; | ||||
|   string preset_mode = 13; | ||||
|   uint32 device_id = 14; | ||||
| } | ||||
|  | ||||
| // ==================== LIGHT ==================== | ||||
| @@ -523,6 +527,7 @@ message LightCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_LIGHT"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   bool has_state = 2; | ||||
| @@ -551,6 +556,7 @@ message LightCommandRequest { | ||||
|   uint32 flash_length = 17; | ||||
|   bool has_effect = 18; | ||||
|   string effect = 19; | ||||
|   uint32 device_id = 28; | ||||
| } | ||||
|  | ||||
| // ==================== SENSOR ==================== | ||||
| @@ -640,9 +646,11 @@ message SwitchCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_SWITCH"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   bool state = 2; | ||||
|   uint32 device_id = 3; | ||||
| } | ||||
|  | ||||
| // ==================== TEXT SENSOR ==================== | ||||
| @@ -850,12 +858,14 @@ message ListEntitiesCameraResponse { | ||||
|  | ||||
| message CameraImageResponse { | ||||
|   option (id) = 44; | ||||
|   option (base_class) = "StateResponseProtoMessage"; | ||||
|   option (source) = SOURCE_SERVER; | ||||
|   option (ifdef) = "USE_CAMERA"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   bytes data = 2; | ||||
|   bool done = 3; | ||||
|   uint32 device_id = 4; | ||||
| } | ||||
| message CameraImageRequest { | ||||
|   option (id) = 45; | ||||
| @@ -980,6 +990,7 @@ message ClimateCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_CLIMATE"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   bool has_mode = 2; | ||||
| @@ -1005,6 +1016,7 @@ message ClimateCommandRequest { | ||||
|   string custom_preset = 21; | ||||
|   bool has_target_humidity = 22; | ||||
|   float target_humidity = 23; | ||||
|   uint32 device_id = 24; | ||||
| } | ||||
|  | ||||
| // ==================== NUMBER ==================== | ||||
| @@ -1054,9 +1066,11 @@ message NumberCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_NUMBER"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   float state = 2; | ||||
|   uint32 device_id = 3; | ||||
| } | ||||
|  | ||||
| // ==================== SELECT ==================== | ||||
| @@ -1096,9 +1110,11 @@ message SelectCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_SELECT"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   string state = 2; | ||||
|   uint32 device_id = 3; | ||||
| } | ||||
|  | ||||
| // ==================== SIREN ==================== | ||||
| @@ -1137,6 +1153,7 @@ message SirenCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_SIREN"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   bool has_state = 2; | ||||
| @@ -1147,6 +1164,7 @@ message SirenCommandRequest { | ||||
|   uint32 duration = 7; | ||||
|   bool has_volume = 8; | ||||
|   float volume = 9; | ||||
|   uint32 device_id = 10; | ||||
| } | ||||
|  | ||||
| // ==================== LOCK ==================== | ||||
| @@ -1201,12 +1219,14 @@ message LockCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_LOCK"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|   fixed32 key = 1; | ||||
|   LockCommand command = 2; | ||||
|  | ||||
|   // Not yet implemented: | ||||
|   bool has_code = 3; | ||||
|   string code = 4; | ||||
|   uint32 device_id = 5; | ||||
| } | ||||
|  | ||||
| // ==================== BUTTON ==================== | ||||
| @@ -1232,8 +1252,10 @@ message ButtonCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_BUTTON"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   uint32 device_id = 2; | ||||
| } | ||||
|  | ||||
| // ==================== MEDIA PLAYER ==================== | ||||
| @@ -1301,6 +1323,7 @@ message MediaPlayerCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_MEDIA_PLAYER"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|  | ||||
| @@ -1315,6 +1338,7 @@ message MediaPlayerCommandRequest { | ||||
|  | ||||
|   bool has_announcement = 8; | ||||
|   bool announcement = 9; | ||||
|   uint32 device_id = 10; | ||||
| } | ||||
|  | ||||
| // ==================== BLUETOOTH ==================== | ||||
| @@ -1843,9 +1867,11 @@ message AlarmControlPanelCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_ALARM_CONTROL_PANEL"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|   fixed32 key = 1; | ||||
|   AlarmControlPanelStateCommand command = 2; | ||||
|   string code = 3; | ||||
|   uint32 device_id = 4; | ||||
| } | ||||
|  | ||||
| // ===================== TEXT ===================== | ||||
| @@ -1892,9 +1918,11 @@ message TextCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_TEXT"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   string state = 2; | ||||
|   uint32 device_id = 3; | ||||
| } | ||||
|  | ||||
|  | ||||
| @@ -1936,11 +1964,13 @@ message DateCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_DATETIME_DATE"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   uint32 year = 2; | ||||
|   uint32 month = 3; | ||||
|   uint32 day = 4; | ||||
|   uint32 device_id = 5; | ||||
| } | ||||
|  | ||||
| // ==================== DATETIME TIME ==================== | ||||
| @@ -1981,11 +2011,13 @@ message TimeCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_DATETIME_TIME"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   uint32 hour = 2; | ||||
|   uint32 minute = 3; | ||||
|   uint32 second = 4; | ||||
|   uint32 device_id = 5; | ||||
| } | ||||
|  | ||||
| // ==================== EVENT ==================== | ||||
| @@ -2065,11 +2097,13 @@ message ValveCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_VALVE"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   bool has_position = 2; | ||||
|   float position = 3; | ||||
|   bool stop = 4; | ||||
|   uint32 device_id = 5; | ||||
| } | ||||
|  | ||||
| // ==================== DATETIME DATETIME ==================== | ||||
| @@ -2108,9 +2142,11 @@ message DateTimeCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_DATETIME_DATETIME"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   fixed32 epoch_seconds = 2; | ||||
|   uint32 device_id = 3; | ||||
| } | ||||
|  | ||||
| // ==================== UPDATE ==================== | ||||
| @@ -2160,7 +2196,9 @@ message UpdateCommandRequest { | ||||
|   option (source) = SOURCE_CLIENT; | ||||
|   option (ifdef) = "USE_UPDATE"; | ||||
|   option (no_delay) = true; | ||||
|   option (base_class) = "CommandProtoMessage"; | ||||
|  | ||||
|   fixed32 key = 1; | ||||
|   UpdateCommand command = 2; | ||||
|   uint32 device_id = 3; | ||||
| } | ||||
|   | ||||
| @@ -193,14 +193,15 @@ void APIConnection::loop() { | ||||
|       // If we can't send the ping request directly (tx_buffer full), | ||||
|       // schedule it at the front of the batch so it will be sent with priority | ||||
|       ESP_LOGW(TAG, "Buffer full, ping queued"); | ||||
|       this->schedule_message_front_(nullptr, &APIConnection::try_send_ping_request, PingRequest::MESSAGE_TYPE); | ||||
|       this->schedule_message_front_(nullptr, &APIConnection::try_send_ping_request, PingRequest::MESSAGE_TYPE, | ||||
|                                     PingRequest::ESTIMATED_SIZE); | ||||
|       this->flags_.sent_ping = true;  // Mark as sent to avoid scheduling multiple pings | ||||
|     } | ||||
|   } | ||||
|  | ||||
| #ifdef USE_CAMERA | ||||
|   if (this->image_reader_ && this->image_reader_->available() && this->helper_->can_write_without_blocking()) { | ||||
|     uint32_t to_send = std::min((size_t) MAX_PACKET_SIZE, this->image_reader_->available()); | ||||
|     uint32_t to_send = std::min((size_t) MAX_BATCH_PACKET_SIZE, this->image_reader_->available()); | ||||
|     bool done = this->image_reader_->available() == to_send; | ||||
|     uint32_t msg_size = 0; | ||||
|     ProtoSize::add_fixed_field<4>(msg_size, 1, true); | ||||
| @@ -265,7 +266,7 @@ void APIConnection::on_disconnect_response(const DisconnectResponse &value) { | ||||
|  | ||||
| // Encodes a message to the buffer and returns the total number of bytes used, | ||||
| // including header and footer overhead. Returns 0 if the message doesn't fit. | ||||
| uint16_t APIConnection::encode_message_to_buffer(ProtoMessage &msg, uint16_t message_type, APIConnection *conn, | ||||
| uint16_t APIConnection::encode_message_to_buffer(ProtoMessage &msg, uint8_t message_type, APIConnection *conn, | ||||
|                                                  uint32_t remaining_size, bool is_single) { | ||||
| #ifdef HAS_PROTO_MESSAGE_DUMP | ||||
|   // If in log-only mode, just log and return | ||||
| @@ -316,7 +317,7 @@ uint16_t APIConnection::encode_message_to_buffer(ProtoMessage &msg, uint16_t mes | ||||
| #ifdef USE_BINARY_SENSOR | ||||
| bool APIConnection::send_binary_sensor_state(binary_sensor::BinarySensor *binary_sensor) { | ||||
|   return this->send_message_smart_(binary_sensor, &APIConnection::try_send_binary_sensor_state, | ||||
|                                    BinarySensorStateResponse::MESSAGE_TYPE); | ||||
|                                    BinarySensorStateResponse::MESSAGE_TYPE, BinarySensorStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
|  | ||||
| uint16_t APIConnection::try_send_binary_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
| @@ -343,7 +344,8 @@ uint16_t APIConnection::try_send_binary_sensor_info(EntityBase *entity, APIConne | ||||
|  | ||||
| #ifdef USE_COVER | ||||
| bool APIConnection::send_cover_state(cover::Cover *cover) { | ||||
|   return this->send_message_smart_(cover, &APIConnection::try_send_cover_state, CoverStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(cover, &APIConnection::try_send_cover_state, CoverStateResponse::MESSAGE_TYPE, | ||||
|                                    CoverStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
| uint16_t APIConnection::try_send_cover_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
|                                              bool is_single) { | ||||
| @@ -400,7 +402,8 @@ void APIConnection::cover_command(const CoverCommandRequest &msg) { | ||||
|  | ||||
| #ifdef USE_FAN | ||||
| bool APIConnection::send_fan_state(fan::Fan *fan) { | ||||
|   return this->send_message_smart_(fan, &APIConnection::try_send_fan_state, FanStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(fan, &APIConnection::try_send_fan_state, FanStateResponse::MESSAGE_TYPE, | ||||
|                                    FanStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
| uint16_t APIConnection::try_send_fan_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
|                                            bool is_single) { | ||||
| @@ -455,7 +458,8 @@ void APIConnection::fan_command(const FanCommandRequest &msg) { | ||||
|  | ||||
| #ifdef USE_LIGHT | ||||
| bool APIConnection::send_light_state(light::LightState *light) { | ||||
|   return this->send_message_smart_(light, &APIConnection::try_send_light_state, LightStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(light, &APIConnection::try_send_light_state, LightStateResponse::MESSAGE_TYPE, | ||||
|                                    LightStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
| uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
|                                              bool is_single) { | ||||
| @@ -543,7 +547,8 @@ void APIConnection::light_command(const LightCommandRequest &msg) { | ||||
|  | ||||
| #ifdef USE_SENSOR | ||||
| bool APIConnection::send_sensor_state(sensor::Sensor *sensor) { | ||||
|   return this->send_message_smart_(sensor, &APIConnection::try_send_sensor_state, SensorStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(sensor, &APIConnection::try_send_sensor_state, SensorStateResponse::MESSAGE_TYPE, | ||||
|                                    SensorStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
|  | ||||
| uint16_t APIConnection::try_send_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
| @@ -575,7 +580,8 @@ uint16_t APIConnection::try_send_sensor_info(EntityBase *entity, APIConnection * | ||||
|  | ||||
| #ifdef USE_SWITCH | ||||
| bool APIConnection::send_switch_state(switch_::Switch *a_switch) { | ||||
|   return this->send_message_smart_(a_switch, &APIConnection::try_send_switch_state, SwitchStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(a_switch, &APIConnection::try_send_switch_state, SwitchStateResponse::MESSAGE_TYPE, | ||||
|                                    SwitchStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
|  | ||||
| uint16_t APIConnection::try_send_switch_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
| @@ -611,7 +617,7 @@ void APIConnection::switch_command(const SwitchCommandRequest &msg) { | ||||
| #ifdef USE_TEXT_SENSOR | ||||
| bool APIConnection::send_text_sensor_state(text_sensor::TextSensor *text_sensor) { | ||||
|   return this->send_message_smart_(text_sensor, &APIConnection::try_send_text_sensor_state, | ||||
|                                    TextSensorStateResponse::MESSAGE_TYPE); | ||||
|                                    TextSensorStateResponse::MESSAGE_TYPE, TextSensorStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
|  | ||||
| uint16_t APIConnection::try_send_text_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
| @@ -638,7 +644,8 @@ uint16_t APIConnection::try_send_text_sensor_info(EntityBase *entity, APIConnect | ||||
|  | ||||
| #ifdef USE_CLIMATE | ||||
| bool APIConnection::send_climate_state(climate::Climate *climate) { | ||||
|   return this->send_message_smart_(climate, &APIConnection::try_send_climate_state, ClimateStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(climate, &APIConnection::try_send_climate_state, ClimateStateResponse::MESSAGE_TYPE, | ||||
|                                    ClimateStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
| uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
|                                                bool is_single) { | ||||
| @@ -734,7 +741,8 @@ void APIConnection::climate_command(const ClimateCommandRequest &msg) { | ||||
|  | ||||
| #ifdef USE_NUMBER | ||||
| bool APIConnection::send_number_state(number::Number *number) { | ||||
|   return this->send_message_smart_(number, &APIConnection::try_send_number_state, NumberStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(number, &APIConnection::try_send_number_state, NumberStateResponse::MESSAGE_TYPE, | ||||
|                                    NumberStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
|  | ||||
| uint16_t APIConnection::try_send_number_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
| @@ -770,7 +778,8 @@ void APIConnection::number_command(const NumberCommandRequest &msg) { | ||||
|  | ||||
| #ifdef USE_DATETIME_DATE | ||||
| bool APIConnection::send_date_state(datetime::DateEntity *date) { | ||||
|   return this->send_message_smart_(date, &APIConnection::try_send_date_state, DateStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(date, &APIConnection::try_send_date_state, DateStateResponse::MESSAGE_TYPE, | ||||
|                                    DateStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
| uint16_t APIConnection::try_send_date_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
|                                             bool is_single) { | ||||
| @@ -800,7 +809,8 @@ void APIConnection::date_command(const DateCommandRequest &msg) { | ||||
|  | ||||
| #ifdef USE_DATETIME_TIME | ||||
| bool APIConnection::send_time_state(datetime::TimeEntity *time) { | ||||
|   return this->send_message_smart_(time, &APIConnection::try_send_time_state, TimeStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(time, &APIConnection::try_send_time_state, TimeStateResponse::MESSAGE_TYPE, | ||||
|                                    TimeStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
| uint16_t APIConnection::try_send_time_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
|                                             bool is_single) { | ||||
| @@ -831,7 +841,7 @@ void APIConnection::time_command(const TimeCommandRequest &msg) { | ||||
| #ifdef USE_DATETIME_DATETIME | ||||
| bool APIConnection::send_datetime_state(datetime::DateTimeEntity *datetime) { | ||||
|   return this->send_message_smart_(datetime, &APIConnection::try_send_datetime_state, | ||||
|                                    DateTimeStateResponse::MESSAGE_TYPE); | ||||
|                                    DateTimeStateResponse::MESSAGE_TYPE, DateTimeStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
| uint16_t APIConnection::try_send_datetime_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
|                                                 bool is_single) { | ||||
| @@ -862,7 +872,8 @@ void APIConnection::datetime_command(const DateTimeCommandRequest &msg) { | ||||
|  | ||||
| #ifdef USE_TEXT | ||||
| bool APIConnection::send_text_state(text::Text *text) { | ||||
|   return this->send_message_smart_(text, &APIConnection::try_send_text_state, TextStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(text, &APIConnection::try_send_text_state, TextStateResponse::MESSAGE_TYPE, | ||||
|                                    TextStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
|  | ||||
| uint16_t APIConnection::try_send_text_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
| @@ -896,7 +907,8 @@ void APIConnection::text_command(const TextCommandRequest &msg) { | ||||
|  | ||||
| #ifdef USE_SELECT | ||||
| bool APIConnection::send_select_state(select::Select *select) { | ||||
|   return this->send_message_smart_(select, &APIConnection::try_send_select_state, SelectStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(select, &APIConnection::try_send_select_state, SelectStateResponse::MESSAGE_TYPE, | ||||
|                                    SelectStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
|  | ||||
| uint16_t APIConnection::try_send_select_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
| @@ -944,7 +956,8 @@ void esphome::api::APIConnection::button_command(const ButtonCommandRequest &msg | ||||
|  | ||||
| #ifdef USE_LOCK | ||||
| bool APIConnection::send_lock_state(lock::Lock *a_lock) { | ||||
|   return this->send_message_smart_(a_lock, &APIConnection::try_send_lock_state, LockStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(a_lock, &APIConnection::try_send_lock_state, LockStateResponse::MESSAGE_TYPE, | ||||
|                                    LockStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
|  | ||||
| uint16_t APIConnection::try_send_lock_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
| @@ -986,7 +999,8 @@ void APIConnection::lock_command(const LockCommandRequest &msg) { | ||||
|  | ||||
| #ifdef USE_VALVE | ||||
| bool APIConnection::send_valve_state(valve::Valve *valve) { | ||||
|   return this->send_message_smart_(valve, &APIConnection::try_send_valve_state, ValveStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(valve, &APIConnection::try_send_valve_state, ValveStateResponse::MESSAGE_TYPE, | ||||
|                                    ValveStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
| uint16_t APIConnection::try_send_valve_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
|                                              bool is_single) { | ||||
| @@ -1023,7 +1037,7 @@ void APIConnection::valve_command(const ValveCommandRequest &msg) { | ||||
| #ifdef USE_MEDIA_PLAYER | ||||
| bool APIConnection::send_media_player_state(media_player::MediaPlayer *media_player) { | ||||
|   return this->send_message_smart_(media_player, &APIConnection::try_send_media_player_state, | ||||
|                                    MediaPlayerStateResponse::MESSAGE_TYPE); | ||||
|                                    MediaPlayerStateResponse::MESSAGE_TYPE, MediaPlayerStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
| uint16_t APIConnection::try_send_media_player_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
|                                                     bool is_single) { | ||||
| @@ -1262,7 +1276,8 @@ void APIConnection::voice_assistant_set_configuration(const VoiceAssistantSetCon | ||||
| #ifdef USE_ALARM_CONTROL_PANEL | ||||
| bool APIConnection::send_alarm_control_panel_state(alarm_control_panel::AlarmControlPanel *a_alarm_control_panel) { | ||||
|   return this->send_message_smart_(a_alarm_control_panel, &APIConnection::try_send_alarm_control_panel_state, | ||||
|                                    AlarmControlPanelStateResponse::MESSAGE_TYPE); | ||||
|                                    AlarmControlPanelStateResponse::MESSAGE_TYPE, | ||||
|                                    AlarmControlPanelStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
| uint16_t APIConnection::try_send_alarm_control_panel_state(EntityBase *entity, APIConnection *conn, | ||||
|                                                            uint32_t remaining_size, bool is_single) { | ||||
| @@ -1316,7 +1331,8 @@ void APIConnection::alarm_control_panel_command(const AlarmControlPanelCommandRe | ||||
|  | ||||
| #ifdef USE_EVENT | ||||
| void APIConnection::send_event(event::Event *event, const std::string &event_type) { | ||||
|   this->schedule_message_(event, MessageCreator(event_type), EventResponse::MESSAGE_TYPE); | ||||
|   this->schedule_message_(event, MessageCreator(event_type), EventResponse::MESSAGE_TYPE, | ||||
|                           EventResponse::ESTIMATED_SIZE); | ||||
| } | ||||
| uint16_t APIConnection::try_send_event_response(event::Event *event, const std::string &event_type, APIConnection *conn, | ||||
|                                                 uint32_t remaining_size, bool is_single) { | ||||
| @@ -1341,7 +1357,8 @@ uint16_t APIConnection::try_send_event_info(EntityBase *entity, APIConnection *c | ||||
|  | ||||
| #ifdef USE_UPDATE | ||||
| bool APIConnection::send_update_state(update::UpdateEntity *update) { | ||||
|   return this->send_message_smart_(update, &APIConnection::try_send_update_state, UpdateStateResponse::MESSAGE_TYPE); | ||||
|   return this->send_message_smart_(update, &APIConnection::try_send_update_state, UpdateStateResponse::MESSAGE_TYPE, | ||||
|                                    UpdateStateResponse::ESTIMATED_SIZE); | ||||
| } | ||||
| uint16_t APIConnection::try_send_update_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
|                                               bool is_single) { | ||||
| @@ -1588,7 +1605,7 @@ bool APIConnection::try_to_clear_buffer(bool log_out_of_space) { | ||||
|   } | ||||
|   return false; | ||||
| } | ||||
| bool APIConnection::send_buffer(ProtoWriteBuffer buffer, uint16_t message_type) { | ||||
| bool APIConnection::send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) { | ||||
|   if (!this->try_to_clear_buffer(message_type != SubscribeLogsResponse::MESSAGE_TYPE)) {  // SubscribeLogsResponse | ||||
|     return false; | ||||
|   } | ||||
| @@ -1622,7 +1639,8 @@ void APIConnection::on_fatal_error() { | ||||
|   this->flags_.remove = true; | ||||
| } | ||||
|  | ||||
| void APIConnection::DeferredBatch::add_item(EntityBase *entity, MessageCreator creator, uint16_t message_type) { | ||||
| void APIConnection::DeferredBatch::add_item(EntityBase *entity, MessageCreator creator, uint8_t message_type, | ||||
|                                             uint8_t estimated_size) { | ||||
|   // Check if we already have a message of this type for this entity | ||||
|   // This provides deduplication per entity/message_type combination | ||||
|   // O(n) but optimized for RAM and not performance. | ||||
| @@ -1637,12 +1655,13 @@ void APIConnection::DeferredBatch::add_item(EntityBase *entity, MessageCreator c | ||||
|   } | ||||
|  | ||||
|   // No existing item found, add new one | ||||
|   items.emplace_back(entity, std::move(creator), message_type); | ||||
|   items.emplace_back(entity, std::move(creator), message_type, estimated_size); | ||||
| } | ||||
|  | ||||
| void APIConnection::DeferredBatch::add_item_front(EntityBase *entity, MessageCreator creator, uint16_t message_type) { | ||||
| void APIConnection::DeferredBatch::add_item_front(EntityBase *entity, MessageCreator creator, uint8_t message_type, | ||||
|                                                   uint8_t estimated_size) { | ||||
|   // Insert at front for high priority messages (no deduplication check) | ||||
|   items.insert(items.begin(), BatchItem(entity, std::move(creator), message_type)); | ||||
|   items.insert(items.begin(), BatchItem(entity, std::move(creator), message_type, estimated_size)); | ||||
| } | ||||
|  | ||||
| bool APIConnection::schedule_batch_() { | ||||
| @@ -1714,7 +1733,7 @@ void APIConnection::process_batch_() { | ||||
|   uint32_t total_estimated_size = 0; | ||||
|   for (size_t i = 0; i < this->deferred_batch_.size(); i++) { | ||||
|     const auto &item = this->deferred_batch_[i]; | ||||
|     total_estimated_size += get_estimated_message_size(item.message_type); | ||||
|     total_estimated_size += item.estimated_size; | ||||
|   } | ||||
|  | ||||
|   // Calculate total overhead for all messages | ||||
| @@ -1752,9 +1771,9 @@ void APIConnection::process_batch_() { | ||||
|  | ||||
|     // Update tracking variables | ||||
|     items_processed++; | ||||
|     // After first message, set remaining size to MAX_PACKET_SIZE to avoid fragmentation | ||||
|     // After first message, set remaining size to MAX_BATCH_PACKET_SIZE to avoid fragmentation | ||||
|     if (items_processed == 1) { | ||||
|       remaining_size = MAX_PACKET_SIZE; | ||||
|       remaining_size = MAX_BATCH_PACKET_SIZE; | ||||
|     } | ||||
|     remaining_size -= payload_size; | ||||
|     // Calculate where the next message's header padding will start | ||||
| @@ -1808,7 +1827,7 @@ void APIConnection::process_batch_() { | ||||
| } | ||||
|  | ||||
| uint16_t APIConnection::MessageCreator::operator()(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
|                                                    bool is_single, uint16_t message_type) const { | ||||
|                                                    bool is_single, uint8_t message_type) const { | ||||
| #ifdef USE_EVENT | ||||
|   // Special case: EventResponse uses string pointer | ||||
|   if (message_type == EventResponse::MESSAGE_TYPE) { | ||||
| @@ -1839,149 +1858,6 @@ uint16_t APIConnection::try_send_ping_request(EntityBase *entity, APIConnection | ||||
|   return encode_message_to_buffer(req, PingRequest::MESSAGE_TYPE, conn, remaining_size, is_single); | ||||
| } | ||||
|  | ||||
| uint16_t APIConnection::get_estimated_message_size(uint16_t message_type) { | ||||
|   // Use generated ESTIMATED_SIZE constants from each message type | ||||
|   switch (message_type) { | ||||
| #ifdef USE_BINARY_SENSOR | ||||
|     case BinarySensorStateResponse::MESSAGE_TYPE: | ||||
|       return BinarySensorStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesBinarySensorResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesBinarySensorResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_SENSOR | ||||
|     case SensorStateResponse::MESSAGE_TYPE: | ||||
|       return SensorStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesSensorResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesSensorResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_SWITCH | ||||
|     case SwitchStateResponse::MESSAGE_TYPE: | ||||
|       return SwitchStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesSwitchResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesSwitchResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_TEXT_SENSOR | ||||
|     case TextSensorStateResponse::MESSAGE_TYPE: | ||||
|       return TextSensorStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesTextSensorResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesTextSensorResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_NUMBER | ||||
|     case NumberStateResponse::MESSAGE_TYPE: | ||||
|       return NumberStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesNumberResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesNumberResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_TEXT | ||||
|     case TextStateResponse::MESSAGE_TYPE: | ||||
|       return TextStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesTextResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesTextResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_SELECT | ||||
|     case SelectStateResponse::MESSAGE_TYPE: | ||||
|       return SelectStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesSelectResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesSelectResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_LOCK | ||||
|     case LockStateResponse::MESSAGE_TYPE: | ||||
|       return LockStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesLockResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesLockResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_EVENT | ||||
|     case EventResponse::MESSAGE_TYPE: | ||||
|       return EventResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesEventResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesEventResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_COVER | ||||
|     case CoverStateResponse::MESSAGE_TYPE: | ||||
|       return CoverStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesCoverResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesCoverResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_FAN | ||||
|     case FanStateResponse::MESSAGE_TYPE: | ||||
|       return FanStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesFanResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesFanResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_LIGHT | ||||
|     case LightStateResponse::MESSAGE_TYPE: | ||||
|       return LightStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesLightResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesLightResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_CLIMATE | ||||
|     case ClimateStateResponse::MESSAGE_TYPE: | ||||
|       return ClimateStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesClimateResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesClimateResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_ESP32_CAMERA | ||||
|     case ListEntitiesCameraResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesCameraResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_BUTTON | ||||
|     case ListEntitiesButtonResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesButtonResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_MEDIA_PLAYER | ||||
|     case MediaPlayerStateResponse::MESSAGE_TYPE: | ||||
|       return MediaPlayerStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesMediaPlayerResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesMediaPlayerResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_ALARM_CONTROL_PANEL | ||||
|     case AlarmControlPanelStateResponse::MESSAGE_TYPE: | ||||
|       return AlarmControlPanelStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesAlarmControlPanelResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesAlarmControlPanelResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_DATETIME_DATE | ||||
|     case DateStateResponse::MESSAGE_TYPE: | ||||
|       return DateStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesDateResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesDateResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_DATETIME_TIME | ||||
|     case TimeStateResponse::MESSAGE_TYPE: | ||||
|       return TimeStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesTimeResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesTimeResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_DATETIME_DATETIME | ||||
|     case DateTimeStateResponse::MESSAGE_TYPE: | ||||
|       return DateTimeStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesDateTimeResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesDateTimeResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_VALVE | ||||
|     case ValveStateResponse::MESSAGE_TYPE: | ||||
|       return ValveStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesValveResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesValveResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
| #ifdef USE_UPDATE | ||||
|     case UpdateStateResponse::MESSAGE_TYPE: | ||||
|       return UpdateStateResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesUpdateResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesUpdateResponse::ESTIMATED_SIZE; | ||||
| #endif | ||||
|     case ListEntitiesServicesResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesServicesResponse::ESTIMATED_SIZE; | ||||
|     case ListEntitiesDoneResponse::MESSAGE_TYPE: | ||||
|       return ListEntitiesDoneResponse::ESTIMATED_SIZE; | ||||
|     case DisconnectRequest::MESSAGE_TYPE: | ||||
|       return DisconnectRequest::ESTIMATED_SIZE; | ||||
|     default: | ||||
|       // Fallback for unknown message types | ||||
|       return 24; | ||||
|   } | ||||
| } | ||||
|  | ||||
| }  // namespace api | ||||
| }  // namespace esphome | ||||
| #endif | ||||
|   | ||||
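The api_connection.cpp hunks above drop the centralized get_estimated_message_size() switch and instead pass each message's generated ESTIMATED_SIZE constant alongside its MESSAGE_TYPE at the call site. A minimal sketch of the resulting calling convention, using the sensor handler as an illustrative example (it is not shown in the hunks above, but it follows the same pattern as the text, select, lock and valve handlers):

    bool APIConnection::send_sensor_state(sensor::Sensor *sensor) {
      // The estimated size now travels with the call instead of being looked up
      // in a switch at batch time.
      return this->send_message_smart_(sensor, &APIConnection::try_send_sensor_state,
                                       SensorStateResponse::MESSAGE_TYPE,
                                       SensorStateResponse::ESTIMATED_SIZE);
    }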
| @@ -33,7 +33,7 @@ class APIConnection : public APIServerConnection { | ||||
|  | ||||
|   bool send_list_info_done() { | ||||
|     return this->schedule_message_(nullptr, &APIConnection::try_send_list_info_done, | ||||
|                                    ListEntitiesDoneResponse::MESSAGE_TYPE); | ||||
|                                    ListEntitiesDoneResponse::MESSAGE_TYPE, ListEntitiesDoneResponse::ESTIMATED_SIZE); | ||||
|   } | ||||
| #ifdef USE_BINARY_SENSOR | ||||
|   bool send_binary_sensor_state(binary_sensor::BinarySensor *binary_sensor); | ||||
| @@ -256,7 +256,7 @@ class APIConnection : public APIServerConnection { | ||||
|   } | ||||
|  | ||||
|   bool try_to_clear_buffer(bool log_out_of_space); | ||||
|   bool send_buffer(ProtoWriteBuffer buffer, uint16_t message_type) override; | ||||
|   bool send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) override; | ||||
|  | ||||
|   std::string get_client_combined_info() const { | ||||
|     if (this->client_info_ == this->client_peername_) { | ||||
| @@ -298,7 +298,7 @@ class APIConnection : public APIServerConnection { | ||||
|   } | ||||
|  | ||||
|   // Non-template helper to encode any ProtoMessage | ||||
|   static uint16_t encode_message_to_buffer(ProtoMessage &msg, uint16_t message_type, APIConnection *conn, | ||||
|   static uint16_t encode_message_to_buffer(ProtoMessage &msg, uint8_t message_type, APIConnection *conn, | ||||
|                                            uint32_t remaining_size, bool is_single); | ||||
|  | ||||
| #ifdef USE_VOICE_ASSISTANT | ||||
| @@ -443,9 +443,6 @@ class APIConnection : public APIServerConnection { | ||||
|   static uint16_t try_send_disconnect_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
|                                               bool is_single); | ||||
|  | ||||
|   // Helper function to get estimated message size for buffer pre-allocation | ||||
|   static uint16_t get_estimated_message_size(uint16_t message_type); | ||||
|  | ||||
|   // Batch message method for ping requests | ||||
|   static uint16_t try_send_ping_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, | ||||
|                                         bool is_single); | ||||
| @@ -505,10 +502,10 @@ class APIConnection : public APIServerConnection { | ||||
|  | ||||
|     // Call operator - uses message_type to determine union type | ||||
|     uint16_t operator()(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single, | ||||
|                         uint16_t message_type) const; | ||||
|                         uint8_t message_type) const; | ||||
|  | ||||
|     // Manual cleanup method - must be called before destruction for string types | ||||
|     void cleanup(uint16_t message_type) { | ||||
|     void cleanup(uint8_t message_type) { | ||||
| #ifdef USE_EVENT | ||||
|       if (message_type == EventResponse::MESSAGE_TYPE && data_.string_ptr != nullptr) { | ||||
|         delete data_.string_ptr; | ||||
| @@ -529,11 +526,12 @@ class APIConnection : public APIServerConnection { | ||||
|     struct BatchItem { | ||||
|       EntityBase *entity;      // Entity pointer | ||||
|       MessageCreator creator;  // Function that creates the message when needed | ||||
|       uint16_t message_type;   // Message type for overhead calculation | ||||
|       uint8_t message_type;    // Message type for overhead calculation (max 255) | ||||
|       uint8_t estimated_size;  // Estimated message size (max 255 bytes) | ||||
|  | ||||
|       // Constructor for creating BatchItem | ||||
|       BatchItem(EntityBase *entity, MessageCreator creator, uint16_t message_type) | ||||
|           : entity(entity), creator(std::move(creator)), message_type(message_type) {} | ||||
|       BatchItem(EntityBase *entity, MessageCreator creator, uint8_t message_type, uint8_t estimated_size) | ||||
|           : entity(entity), creator(std::move(creator)), message_type(message_type), estimated_size(estimated_size) {} | ||||
|     }; | ||||
|  | ||||
|     std::vector<BatchItem> items; | ||||
| @@ -559,9 +557,9 @@ class APIConnection : public APIServerConnection { | ||||
|     } | ||||
|  | ||||
|     // Add item to the batch | ||||
|     void add_item(EntityBase *entity, MessageCreator creator, uint16_t message_type); | ||||
|     void add_item(EntityBase *entity, MessageCreator creator, uint8_t message_type, uint8_t estimated_size); | ||||
|     // Add item to the front of the batch (for high priority messages like ping) | ||||
|     void add_item_front(EntityBase *entity, MessageCreator creator, uint16_t message_type); | ||||
|     void add_item_front(EntityBase *entity, MessageCreator creator, uint8_t message_type, uint8_t estimated_size); | ||||
|  | ||||
|     // Clear all items with proper cleanup | ||||
|     void clear() { | ||||
| @@ -630,7 +628,7 @@ class APIConnection : public APIServerConnection { | ||||
|   // to send in one go. This is the maximum size of a single packet | ||||
|   // that can be sent over the network. | ||||
|   // This is to avoid fragmentation of the packet. | ||||
|   static constexpr size_t MAX_PACKET_SIZE = 1390;  // MTU | ||||
|   static constexpr size_t MAX_BATCH_PACKET_SIZE = 1390;  // MTU | ||||
|  | ||||
|   bool schedule_batch_(); | ||||
|   void process_batch_(); | ||||
| @@ -641,9 +639,9 @@ class APIConnection : public APIServerConnection { | ||||
|  | ||||
| #ifdef HAS_PROTO_MESSAGE_DUMP | ||||
|   // Helper to log a proto message from a MessageCreator object | ||||
|   void log_proto_message_(EntityBase *entity, const MessageCreator &creator, uint16_t message_type) { | ||||
|   void log_proto_message_(EntityBase *entity, const MessageCreator &creator, uint8_t message_type) { | ||||
|     this->flags_.log_only_mode = true; | ||||
|     creator(entity, this, MAX_PACKET_SIZE, true, message_type); | ||||
|     creator(entity, this, MAX_BATCH_PACKET_SIZE, true, message_type); | ||||
|     this->flags_.log_only_mode = false; | ||||
|   } | ||||
|  | ||||
| @@ -654,7 +652,8 @@ class APIConnection : public APIServerConnection { | ||||
| #endif | ||||
|  | ||||
|   // Helper method to send a message either immediately or via batching | ||||
|   bool send_message_smart_(EntityBase *entity, MessageCreatorPtr creator, uint16_t message_type) { | ||||
|   bool send_message_smart_(EntityBase *entity, MessageCreatorPtr creator, uint8_t message_type, | ||||
|                            uint8_t estimated_size) { | ||||
|     // Try to send immediately if: | ||||
|     // 1. We should try to send immediately (should_try_send_immediately = true) | ||||
|     // 2. Batch delay is 0 (user has opted in to immediate sending) | ||||
| @@ -662,7 +661,7 @@ class APIConnection : public APIServerConnection { | ||||
|     if (this->flags_.should_try_send_immediately && this->get_batch_delay_ms_() == 0 && | ||||
|         this->helper_->can_write_without_blocking()) { | ||||
|       // Now actually encode and send | ||||
|       if (creator(entity, this, MAX_PACKET_SIZE, true) && | ||||
|       if (creator(entity, this, MAX_BATCH_PACKET_SIZE, true) && | ||||
|           this->send_buffer(ProtoWriteBuffer{&this->parent_->get_shared_buffer_ref()}, message_type)) { | ||||
| #ifdef HAS_PROTO_MESSAGE_DUMP | ||||
|         // Log the message in verbose mode | ||||
| @@ -675,23 +674,25 @@ class APIConnection : public APIServerConnection { | ||||
|     } | ||||
|  | ||||
|     // Fall back to scheduled batching | ||||
|     return this->schedule_message_(entity, creator, message_type); | ||||
|     return this->schedule_message_(entity, creator, message_type, estimated_size); | ||||
|   } | ||||
|  | ||||
|   // Helper function to schedule a deferred message with known message type | ||||
|   bool schedule_message_(EntityBase *entity, MessageCreator creator, uint16_t message_type) { | ||||
|     this->deferred_batch_.add_item(entity, std::move(creator), message_type); | ||||
|   bool schedule_message_(EntityBase *entity, MessageCreator creator, uint8_t message_type, uint8_t estimated_size) { | ||||
|     this->deferred_batch_.add_item(entity, std::move(creator), message_type, estimated_size); | ||||
|     return this->schedule_batch_(); | ||||
|   } | ||||
|  | ||||
|   // Overload for function pointers (for info messages and current state reads) | ||||
|   bool schedule_message_(EntityBase *entity, MessageCreatorPtr function_ptr, uint16_t message_type) { | ||||
|     return schedule_message_(entity, MessageCreator(function_ptr), message_type); | ||||
|   bool schedule_message_(EntityBase *entity, MessageCreatorPtr function_ptr, uint8_t message_type, | ||||
|                          uint8_t estimated_size) { | ||||
|     return schedule_message_(entity, MessageCreator(function_ptr), message_type, estimated_size); | ||||
|   } | ||||
|  | ||||
|   // Helper function to schedule a high priority message at the front of the batch | ||||
|   bool schedule_message_front_(EntityBase *entity, MessageCreatorPtr function_ptr, uint16_t message_type) { | ||||
|     this->deferred_batch_.add_item_front(entity, MessageCreator(function_ptr), message_type); | ||||
|   bool schedule_message_front_(EntityBase *entity, MessageCreatorPtr function_ptr, uint8_t message_type, | ||||
|                                uint8_t estimated_size) { | ||||
|     this->deferred_batch_.add_item_front(entity, MessageCreator(function_ptr), message_type, estimated_size); | ||||
|     return this->schedule_batch_(); | ||||
|   } | ||||
| }; | ||||
|   | ||||
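The header changes narrow BatchItem::message_type to uint8_t and add a uint8_t estimated_size, so the per-item footprint keeps the same two bytes of scalar data while the size estimate now rides along with each queued message (assuming the usual struct padding). The narrowing relies on every message type and estimated size fitting in one byte; if the generated MESSAGE_TYPE / ESTIMATED_SIZE constants are constexpr (an assumption, the generated header is not part of this diff), that invariant can be sketched as a compile-time check:

    // Illustrative only; not part of the diff. Assumes the generated api_pb2.h
    // is included and its constants are constexpr.
    static_assert(SensorStateResponse::MESSAGE_TYPE <= 0xFF,
                  "message types must fit BatchItem's uint8_t field");
    static_assert(SensorStateResponse::ESTIMATED_SIZE <= 0xFF,
                  "estimated sizes must fit BatchItem's uint8_t field");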
| @@ -613,11 +613,13 @@ APIError APINoiseFrameHelper::read_packet(ReadPacketBuffer *buffer) { | ||||
|   buffer->type = type; | ||||
|   return APIError::OK; | ||||
| } | ||||
| APIError APINoiseFrameHelper::write_protobuf_packet(uint16_t type, ProtoWriteBuffer buffer) { | ||||
| APIError APINoiseFrameHelper::write_protobuf_packet(uint8_t type, ProtoWriteBuffer buffer) { | ||||
|   // Resize to include MAC space (required for Noise encryption) | ||||
|   buffer.get_buffer()->resize(buffer.get_buffer()->size() + frame_footer_size_); | ||||
|   PacketInfo packet{type, 0, | ||||
|                     static_cast<uint16_t>(buffer.get_buffer()->size() - frame_header_padding_ - frame_footer_size_)}; | ||||
|   uint16_t payload_size = | ||||
|       static_cast<uint16_t>(buffer.get_buffer()->size() - frame_header_padding_ - frame_footer_size_); | ||||
|  | ||||
|   PacketInfo packet{type, 0, payload_size}; | ||||
|   return write_protobuf_packets(buffer, std::span<const PacketInfo>(&packet, 1)); | ||||
| } | ||||
|  | ||||
| @@ -1002,8 +1004,10 @@ APIError APIPlaintextFrameHelper::read_packet(ReadPacketBuffer *buffer) { | ||||
|   buffer->type = rx_header_parsed_type_; | ||||
|   return APIError::OK; | ||||
| } | ||||
| APIError APIPlaintextFrameHelper::write_protobuf_packet(uint16_t type, ProtoWriteBuffer buffer) { | ||||
|   PacketInfo packet{type, 0, static_cast<uint16_t>(buffer.get_buffer()->size() - frame_header_padding_)}; | ||||
| APIError APIPlaintextFrameHelper::write_protobuf_packet(uint8_t type, ProtoWriteBuffer buffer) { | ||||
|   uint16_t payload_size = static_cast<uint16_t>(buffer.get_buffer()->size() - frame_header_padding_); | ||||
|  | ||||
|   PacketInfo packet{type, 0, payload_size}; | ||||
|   return write_protobuf_packets(buffer, std::span<const PacketInfo>(&packet, 1)); | ||||
| } | ||||
|  | ||||
|   | ||||
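Both single-packet write paths now compute payload_size explicitly before constructing the PacketInfo. A standalone mirror of that arithmetic, with illustrative parameter types and example numbers (the real code operates on the helper's shared buffer and member fields):

    #include <cstddef>
    #include <cstdint>

    // Plaintext case: only the frame header padding precedes the payload.
    // e.g. buffer_size == 45 with 3 bytes of padding -> 42-byte payload.
    uint16_t plaintext_payload_size(std::size_t buffer_size, std::size_t frame_header_padding) {
      return static_cast<uint16_t>(buffer_size - frame_header_padding);
    }

    // Noise case: the helper first grows the buffer by frame_footer_size for the
    // MAC, then excludes both the padding and the footer from the reported payload.
    uint16_t noise_payload_size(std::size_t buffer_size, std::size_t frame_header_padding,
                                std::size_t frame_footer_size) {
      return static_cast<uint16_t>(buffer_size - frame_header_padding - frame_footer_size);
    }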
| @@ -30,13 +30,11 @@ struct ReadPacketBuffer { | ||||
|  | ||||
| // Packed packet info structure to minimize memory usage | ||||
| struct PacketInfo { | ||||
|   uint16_t message_type;  // 2 bytes | ||||
|   uint16_t offset;        // 2 bytes (sufficient for packet size ~1460 bytes) | ||||
|   uint16_t payload_size;  // 2 bytes (up to 65535 bytes) | ||||
|   uint16_t padding;       // 2 byte (for alignment) | ||||
|   uint16_t offset;        // Offset in buffer where message starts | ||||
|   uint16_t payload_size;  // Size of the message payload | ||||
|   uint8_t message_type;   // Message type (0-255) | ||||
|  | ||||
|   PacketInfo(uint16_t type, uint16_t off, uint16_t size) | ||||
|       : message_type(type), offset(off), payload_size(size), padding(0) {} | ||||
|   PacketInfo(uint8_t type, uint16_t off, uint16_t size) : offset(off), payload_size(size), message_type(type) {} | ||||
| }; | ||||
|  | ||||
| enum class APIError : uint16_t { | ||||
| @@ -98,7 +96,7 @@ class APIFrameHelper { | ||||
|   } | ||||
|   // Give this helper a name for logging | ||||
|   void set_log_info(std::string info) { info_ = std::move(info); } | ||||
|   virtual APIError write_protobuf_packet(uint16_t type, ProtoWriteBuffer buffer) = 0; | ||||
|   virtual APIError write_protobuf_packet(uint8_t type, ProtoWriteBuffer buffer) = 0; | ||||
|   // Write multiple protobuf packets in a single operation | ||||
|   // packets contains (message_type, offset, length) for each message in the buffer | ||||
|   // The buffer contains all messages with appropriate padding before each | ||||
| @@ -197,7 +195,7 @@ class APINoiseFrameHelper : public APIFrameHelper { | ||||
|   APIError init() override; | ||||
|   APIError loop() override; | ||||
|   APIError read_packet(ReadPacketBuffer *buffer) override; | ||||
|   APIError write_protobuf_packet(uint16_t type, ProtoWriteBuffer buffer) override; | ||||
|   APIError write_protobuf_packet(uint8_t type, ProtoWriteBuffer buffer) override; | ||||
|   APIError write_protobuf_packets(ProtoWriteBuffer buffer, std::span<const PacketInfo> packets) override; | ||||
|   // Get the frame header padding required by this protocol | ||||
|   uint8_t frame_header_padding() override { return frame_header_padding_; } | ||||
| @@ -251,7 +249,7 @@ class APIPlaintextFrameHelper : public APIFrameHelper { | ||||
|   APIError init() override; | ||||
|   APIError loop() override; | ||||
|   APIError read_packet(ReadPacketBuffer *buffer) override; | ||||
|   APIError write_protobuf_packet(uint16_t type, ProtoWriteBuffer buffer) override; | ||||
|   APIError write_protobuf_packet(uint8_t type, ProtoWriteBuffer buffer) override; | ||||
|   APIError write_protobuf_packets(ProtoWriteBuffer buffer, std::span<const PacketInfo> packets) override; | ||||
|   uint8_t frame_header_padding() override { return frame_header_padding_; } | ||||
|   // Get the frame footer size required by this protocol | ||||
|   | ||||
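Repacking PacketInfo drops the explicit padding member and narrows message_type to uint8_t: the old layout held four uint16_t members (8 bytes), the new one two uint16_t plus one uint8_t, which rounds up to 6 bytes under the usual 2-byte alignment of uint16_t (an assumption about the target ABI, not something the header asserts). A hedged compile-time sketch, assuming api_frame_helper.h is included:

    // Illustrative only; the real header does not carry this assert.
    static_assert(sizeof(PacketInfo) <= 8,
                  "repacked PacketInfo should be no larger than the old 8-byte layout");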
| @@ -623,6 +623,10 @@ bool CoverCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|       this->stop = value.as_bool(); | ||||
|       return true; | ||||
|     } | ||||
|     case 9: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -654,6 +658,7 @@ void CoverCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_bool(6, this->has_tilt); | ||||
|   buffer.encode_float(7, this->tilt); | ||||
|   buffer.encode_bool(8, this->stop); | ||||
|   buffer.encode_uint32(9, this->device_id); | ||||
| } | ||||
| void CoverCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
| @@ -664,6 +669,7 @@ void CoverCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->has_tilt, false); | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->tilt != 0.0f, false); | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->stop, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_FAN | ||||
| @@ -889,6 +895,10 @@ bool FanCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|       this->has_preset_mode = value.as_bool(); | ||||
|       return true; | ||||
|     } | ||||
|     case 14: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -927,6 +937,7 @@ void FanCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_int32(11, this->speed_level); | ||||
|   buffer.encode_bool(12, this->has_preset_mode); | ||||
|   buffer.encode_string(13, this->preset_mode); | ||||
|   buffer.encode_uint32(14, this->device_id); | ||||
| } | ||||
| void FanCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
| @@ -942,6 +953,7 @@ void FanCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_int32_field(total_size, 1, this->speed_level, false); | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->has_preset_mode, false); | ||||
|   ProtoSize::add_string_field(total_size, 1, this->preset_mode, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_LIGHT | ||||
| @@ -1247,6 +1259,10 @@ bool LightCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|       this->has_effect = value.as_bool(); | ||||
|       return true; | ||||
|     } | ||||
|     case 28: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -1335,6 +1351,7 @@ void LightCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_uint32(17, this->flash_length); | ||||
|   buffer.encode_bool(18, this->has_effect); | ||||
|   buffer.encode_string(19, this->effect); | ||||
|   buffer.encode_uint32(28, this->device_id); | ||||
| } | ||||
| void LightCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
| @@ -1364,6 +1381,7 @@ void LightCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_uint32_field(total_size, 2, this->flash_length, false); | ||||
|   ProtoSize::add_bool_field(total_size, 2, this->has_effect, false); | ||||
|   ProtoSize::add_string_field(total_size, 2, this->effect, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 2, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_SENSOR | ||||
| @@ -1637,6 +1655,10 @@ bool SwitchCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|       this->state = value.as_bool(); | ||||
|       return true; | ||||
|     } | ||||
|     case 3: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -1654,10 +1676,12 @@ bool SwitchCommandRequest::decode_32bit(uint32_t field_id, Proto32Bit value) { | ||||
| void SwitchCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_fixed32(1, this->key); | ||||
|   buffer.encode_bool(2, this->state); | ||||
|   buffer.encode_uint32(3, this->device_id); | ||||
| } | ||||
| void SwitchCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->state, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_TEXT_SENSOR | ||||
| @@ -2293,6 +2317,10 @@ bool CameraImageResponse::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|       this->done = value.as_bool(); | ||||
|       return true; | ||||
|     } | ||||
|     case 4: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -2321,11 +2349,13 @@ void CameraImageResponse::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_fixed32(1, this->key); | ||||
|   buffer.encode_bytes(2, reinterpret_cast<const uint8_t *>(this->data.data()), this->data.size()); | ||||
|   buffer.encode_bool(3, this->done); | ||||
|   buffer.encode_uint32(4, this->device_id); | ||||
| } | ||||
| void CameraImageResponse::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
|   ProtoSize::add_string_field(total_size, 1, this->data, false); | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->done, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| bool CameraImageRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|   switch (field_id) { | ||||
| @@ -2749,6 +2779,10 @@ bool ClimateCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) | ||||
|       this->has_target_humidity = value.as_bool(); | ||||
|       return true; | ||||
|     } | ||||
|     case 24: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -2817,6 +2851,7 @@ void ClimateCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_string(21, this->custom_preset); | ||||
|   buffer.encode_bool(22, this->has_target_humidity); | ||||
|   buffer.encode_float(23, this->target_humidity); | ||||
|   buffer.encode_uint32(24, this->device_id); | ||||
| } | ||||
| void ClimateCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
| @@ -2842,6 +2877,7 @@ void ClimateCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_string_field(total_size, 2, this->custom_preset, false); | ||||
|   ProtoSize::add_bool_field(total_size, 2, this->has_target_humidity, false); | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 2, this->target_humidity != 0.0f, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 2, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_NUMBER | ||||
| @@ -2991,6 +3027,16 @@ void NumberStateResponse::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->missing_state, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| bool NumberCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|   switch (field_id) { | ||||
|     case 3: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| } | ||||
| bool NumberCommandRequest::decode_32bit(uint32_t field_id, Proto32Bit value) { | ||||
|   switch (field_id) { | ||||
|     case 1: { | ||||
| @@ -3008,10 +3054,12 @@ bool NumberCommandRequest::decode_32bit(uint32_t field_id, Proto32Bit value) { | ||||
| void NumberCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_fixed32(1, this->key); | ||||
|   buffer.encode_float(2, this->state); | ||||
|   buffer.encode_uint32(3, this->device_id); | ||||
| } | ||||
| void NumberCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->state != 0.0f, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_SELECT | ||||
| @@ -3143,6 +3191,16 @@ void SelectStateResponse::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->missing_state, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| bool SelectCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|   switch (field_id) { | ||||
|     case 3: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| } | ||||
| bool SelectCommandRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) { | ||||
|   switch (field_id) { | ||||
|     case 2: { | ||||
| @@ -3166,10 +3224,12 @@ bool SelectCommandRequest::decode_32bit(uint32_t field_id, Proto32Bit value) { | ||||
| void SelectCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_fixed32(1, this->key); | ||||
|   buffer.encode_string(2, this->state); | ||||
|   buffer.encode_uint32(3, this->device_id); | ||||
| } | ||||
| void SelectCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
|   ProtoSize::add_string_field(total_size, 1, this->state, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_SIREN | ||||
| @@ -3327,6 +3387,10 @@ bool SirenCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|       this->has_volume = value.as_bool(); | ||||
|       return true; | ||||
|     } | ||||
|     case 10: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -3365,6 +3429,7 @@ void SirenCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_uint32(7, this->duration); | ||||
|   buffer.encode_bool(8, this->has_volume); | ||||
|   buffer.encode_float(9, this->volume); | ||||
|   buffer.encode_uint32(10, this->device_id); | ||||
| } | ||||
| void SirenCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
| @@ -3376,6 +3441,7 @@ void SirenCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->duration, false); | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->has_volume, false); | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->volume != 0.0f, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_LOCK | ||||
| @@ -3517,6 +3583,10 @@ bool LockCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|       this->has_code = value.as_bool(); | ||||
|       return true; | ||||
|     } | ||||
|     case 5: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -3546,12 +3616,14 @@ void LockCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_enum<enums::LockCommand>(2, this->command); | ||||
|   buffer.encode_bool(3, this->has_code); | ||||
|   buffer.encode_string(4, this->code); | ||||
|   buffer.encode_uint32(5, this->device_id); | ||||
| } | ||||
| void LockCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
|   ProtoSize::add_enum_field(total_size, 1, static_cast<uint32_t>(this->command), false); | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->has_code, false); | ||||
|   ProtoSize::add_string_field(total_size, 1, this->code, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_BUTTON | ||||
| @@ -3631,6 +3703,16 @@ void ListEntitiesButtonResponse::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_string_field(total_size, 1, this->device_class, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| bool ButtonCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|   switch (field_id) { | ||||
|     case 2: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| } | ||||
| bool ButtonCommandRequest::decode_32bit(uint32_t field_id, Proto32Bit value) { | ||||
|   switch (field_id) { | ||||
|     case 1: { | ||||
| @@ -3641,9 +3723,13 @@ bool ButtonCommandRequest::decode_32bit(uint32_t field_id, Proto32Bit value) { | ||||
|       return false; | ||||
|   } | ||||
| } | ||||
| void ButtonCommandRequest::encode(ProtoWriteBuffer buffer) const { buffer.encode_fixed32(1, this->key); } | ||||
| void ButtonCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_fixed32(1, this->key); | ||||
|   buffer.encode_uint32(2, this->device_id); | ||||
| } | ||||
| void ButtonCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_MEDIA_PLAYER | ||||
| @@ -3849,6 +3935,10 @@ bool MediaPlayerCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt val | ||||
|       this->announcement = value.as_bool(); | ||||
|       return true; | ||||
|     } | ||||
|     case 10: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -3887,6 +3977,7 @@ void MediaPlayerCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_string(7, this->media_url); | ||||
|   buffer.encode_bool(8, this->has_announcement); | ||||
|   buffer.encode_bool(9, this->announcement); | ||||
|   buffer.encode_uint32(10, this->device_id); | ||||
| } | ||||
| void MediaPlayerCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
| @@ -3898,6 +3989,7 @@ void MediaPlayerCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_string_field(total_size, 1, this->media_url, false); | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->has_announcement, false); | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->announcement, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_BLUETOOTH_PROXY | ||||
| @@ -5311,6 +5403,10 @@ bool AlarmControlPanelCommandRequest::decode_varint(uint32_t field_id, ProtoVarI | ||||
|       this->command = value.as_enum<enums::AlarmControlPanelStateCommand>(); | ||||
|       return true; | ||||
|     } | ||||
|     case 4: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -5339,11 +5435,13 @@ void AlarmControlPanelCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_fixed32(1, this->key); | ||||
|   buffer.encode_enum<enums::AlarmControlPanelStateCommand>(2, this->command); | ||||
|   buffer.encode_string(3, this->code); | ||||
|   buffer.encode_uint32(4, this->device_id); | ||||
| } | ||||
| void AlarmControlPanelCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
|   ProtoSize::add_enum_field(total_size, 1, static_cast<uint32_t>(this->command), false); | ||||
|   ProtoSize::add_string_field(total_size, 1, this->code, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_TEXT | ||||
| @@ -5487,6 +5585,16 @@ void TextStateResponse::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->missing_state, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| bool TextCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|   switch (field_id) { | ||||
|     case 3: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| } | ||||
| bool TextCommandRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) { | ||||
|   switch (field_id) { | ||||
|     case 2: { | ||||
| @@ -5510,10 +5618,12 @@ bool TextCommandRequest::decode_32bit(uint32_t field_id, Proto32Bit value) { | ||||
| void TextCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_fixed32(1, this->key); | ||||
|   buffer.encode_string(2, this->state); | ||||
|   buffer.encode_uint32(3, this->device_id); | ||||
| } | ||||
| void TextCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
|   ProtoSize::add_string_field(total_size, 1, this->state, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_DATETIME_DATE | ||||
| @@ -5653,6 +5763,10 @@ bool DateCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|       this->day = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     case 5: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -5672,12 +5786,14 @@ void DateCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_uint32(2, this->year); | ||||
|   buffer.encode_uint32(3, this->month); | ||||
|   buffer.encode_uint32(4, this->day); | ||||
|   buffer.encode_uint32(5, this->device_id); | ||||
| } | ||||
| void DateCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->year, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->month, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->day, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_DATETIME_TIME | ||||
| @@ -5817,6 +5933,10 @@ bool TimeCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|       this->second = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     case 5: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -5836,12 +5956,14 @@ void TimeCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_uint32(2, this->hour); | ||||
|   buffer.encode_uint32(3, this->minute); | ||||
|   buffer.encode_uint32(4, this->second); | ||||
|   buffer.encode_uint32(5, this->device_id); | ||||
| } | ||||
| void TimeCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->hour, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->minute, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->second, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_EVENT | ||||
| @@ -6119,6 +6241,10 @@ bool ValveCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|       this->stop = value.as_bool(); | ||||
|       return true; | ||||
|     } | ||||
|     case 5: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -6142,12 +6268,14 @@ void ValveCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_bool(2, this->has_position); | ||||
|   buffer.encode_float(3, this->position); | ||||
|   buffer.encode_bool(4, this->stop); | ||||
|   buffer.encode_uint32(5, this->device_id); | ||||
| } | ||||
| void ValveCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->has_position, false); | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->position != 0.0f, false); | ||||
|   ProtoSize::add_bool_field(total_size, 1, this->stop, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_DATETIME_DATETIME | ||||
| @@ -6261,6 +6389,16 @@ void DateTimeStateResponse::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->epoch_seconds != 0, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| bool DateTimeCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|   switch (field_id) { | ||||
|     case 3: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| } | ||||
| bool DateTimeCommandRequest::decode_32bit(uint32_t field_id, Proto32Bit value) { | ||||
|   switch (field_id) { | ||||
|     case 1: { | ||||
| @@ -6278,10 +6416,12 @@ bool DateTimeCommandRequest::decode_32bit(uint32_t field_id, Proto32Bit value) { | ||||
| void DateTimeCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_fixed32(1, this->key); | ||||
|   buffer.encode_fixed32(2, this->epoch_seconds); | ||||
|   buffer.encode_uint32(3, this->device_id); | ||||
| } | ||||
| void DateTimeCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->epoch_seconds != 0, false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
| #ifdef USE_UPDATE | ||||
| @@ -6455,6 +6595,10 @@ bool UpdateCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) { | ||||
|       this->command = value.as_enum<enums::UpdateCommand>(); | ||||
|       return true; | ||||
|     } | ||||
|     case 3: { | ||||
|       this->device_id = value.as_uint32(); | ||||
|       return true; | ||||
|     } | ||||
|     default: | ||||
|       return false; | ||||
|   } | ||||
| @@ -6472,10 +6616,12 @@ bool UpdateCommandRequest::decode_32bit(uint32_t field_id, Proto32Bit value) { | ||||
| void UpdateCommandRequest::encode(ProtoWriteBuffer buffer) const { | ||||
|   buffer.encode_fixed32(1, this->key); | ||||
|   buffer.encode_enum<enums::UpdateCommand>(2, this->command); | ||||
|   buffer.encode_uint32(3, this->device_id); | ||||
| } | ||||
| void UpdateCommandRequest::calculate_size(uint32_t &total_size) const { | ||||
|   ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false); | ||||
|   ProtoSize::add_enum_field(total_size, 1, static_cast<uint32_t>(this->command), false); | ||||
|   ProtoSize::add_uint32_field(total_size, 1, this->device_id, false); | ||||
| } | ||||
| #endif | ||||
|  | ||||
|   | ||||
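Each command request in api_pb2.cpp gains a device_id varint through the same three pieces: a decode_varint() case, an encode_uint32() call in encode(), and an add_uint32_field() contribution in calculate_size(). A condensed sketch of that pattern for a hypothetical FooCommandRequest with device_id as protobuf field 3 (field numbers differ per message in the hunks above):

    // Hypothetical message, mirroring the generated pattern shown above.
    bool FooCommandRequest::decode_varint(uint32_t field_id, ProtoVarInt value) {
      switch (field_id) {
        case 3: {
          this->device_id = value.as_uint32();
          return true;
        }
        default:
          return false;
      }
    }
    void FooCommandRequest::encode(ProtoWriteBuffer buffer) const {
      buffer.encode_fixed32(1, this->key);
      buffer.encode_uint32(3, this->device_id);
    }
    void FooCommandRequest::calculate_size(uint32_t &total_size) const {
      ProtoSize::add_fixed_field<4>(total_size, 1, this->key != 0, false);
      ProtoSize::add_uint32_field(total_size, 1, this->device_id, false);
    }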
										
											
File diff suppressed because it is too large
							| @@ -986,6 +986,11 @@ void CoverCommandRequest::dump_to(std::string &out) const { | ||||
|   out.append("  stop: "); | ||||
|   out.append(YESNO(this->stop)); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -1146,6 +1151,11 @@ void FanCommandRequest::dump_to(std::string &out) const { | ||||
|   out.append("  preset_mode: "); | ||||
|   out.append("'").append(this->preset_mode).append("'"); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -1419,6 +1429,11 @@ void LightCommandRequest::dump_to(std::string &out) const { | ||||
|   out.append("  effect: "); | ||||
|   out.append("'").append(this->effect).append("'"); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -1586,6 +1601,11 @@ void SwitchCommandRequest::dump_to(std::string &out) const { | ||||
|   out.append("  state: "); | ||||
|   out.append(YESNO(this->state)); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -1944,6 +1964,11 @@ void CameraImageResponse::dump_to(std::string &out) const { | ||||
|   out.append("  done: "); | ||||
|   out.append(YESNO(this->done)); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| void CameraImageRequest::dump_to(std::string &out) const { | ||||
| @@ -2263,6 +2288,11 @@ void ClimateCommandRequest::dump_to(std::string &out) const { | ||||
|   snprintf(buffer, sizeof(buffer), "%g", this->target_humidity); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -2367,6 +2397,11 @@ void NumberCommandRequest::dump_to(std::string &out) const { | ||||
|   snprintf(buffer, sizeof(buffer), "%g", this->state); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -2448,6 +2483,11 @@ void SelectCommandRequest::dump_to(std::string &out) const { | ||||
|   out.append("  state: "); | ||||
|   out.append("'").append(this->state).append("'"); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -2563,6 +2603,11 @@ void SirenCommandRequest::dump_to(std::string &out) const { | ||||
|   snprintf(buffer, sizeof(buffer), "%g", this->volume); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -2658,6 +2703,11 @@ void LockCommandRequest::dump_to(std::string &out) const { | ||||
|   out.append("  code: "); | ||||
|   out.append("'").append(this->code).append("'"); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -2711,6 +2761,11 @@ void ButtonCommandRequest::dump_to(std::string &out) const { | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->key); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -2857,6 +2912,11 @@ void MediaPlayerCommandRequest::dump_to(std::string &out) const { | ||||
|   out.append("  announcement: "); | ||||
|   out.append(YESNO(this->announcement)); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -3682,6 +3742,11 @@ void AlarmControlPanelCommandRequest::dump_to(std::string &out) const { | ||||
|   out.append("  code: "); | ||||
|   out.append("'").append(this->code).append("'"); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -3775,6 +3840,11 @@ void TextCommandRequest::dump_to(std::string &out) const { | ||||
|   out.append("  state: "); | ||||
|   out.append("'").append(this->state).append("'"); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -3872,6 +3942,11 @@ void DateCommandRequest::dump_to(std::string &out) const { | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->day); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -3969,6 +4044,11 @@ void TimeCommandRequest::dump_to(std::string &out) const { | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->second); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -4138,6 +4218,11 @@ void ValveCommandRequest::dump_to(std::string &out) const { | ||||
|   out.append("  stop: "); | ||||
|   out.append(YESNO(this->stop)); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -4215,6 +4300,11 @@ void DateTimeCommandRequest::dump_to(std::string &out) const { | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->epoch_seconds); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
| @@ -4323,6 +4413,11 @@ void UpdateCommandRequest::dump_to(std::string &out) const { | ||||
|   out.append("  command: "); | ||||
|   out.append(proto_enum_to_string<enums::UpdateCommand>(this->command)); | ||||
|   out.append("\n"); | ||||
|  | ||||
|   out.append("  device_id: "); | ||||
|   snprintf(buffer, sizeof(buffer), "%" PRIu32, this->device_id); | ||||
|   out.append(buffer); | ||||
|   out.append("\n"); | ||||
|   out.append("}"); | ||||
| } | ||||
| #endif | ||||
|   | ||||
| @@ -475,7 +475,8 @@ void APIServer::on_shutdown() { | ||||
|     if (!c->send_message(DisconnectRequest())) { | ||||
|       // If we can't send the disconnect request directly (tx_buffer full), | ||||
|       // schedule it at the front of the batch so it will be sent with priority | ||||
|       c->schedule_message_front_(nullptr, &APIConnection::try_send_disconnect_request, DisconnectRequest::MESSAGE_TYPE); | ||||
|       c->schedule_message_front_(nullptr, &APIConnection::try_send_disconnect_request, DisconnectRequest::MESSAGE_TYPE, | ||||
|                                  DisconnectRequest::ESTIMATED_SIZE); | ||||
|     } | ||||
|   } | ||||
| } | ||||
|   | ||||
| @@ -14,7 +14,7 @@ class APIConnection; | ||||
| #define LIST_ENTITIES_HANDLER(entity_type, EntityClass, ResponseType) \ | ||||
|   bool ListEntitiesIterator::on_##entity_type(EntityClass *entity) { /* NOLINT(bugprone-macro-parentheses) */ \ | ||||
|     return this->client_->schedule_message_(entity, &APIConnection::try_send_##entity_type##_info, \ | ||||
|                                             ResponseType::MESSAGE_TYPE); \ | ||||
|                                             ResponseType::MESSAGE_TYPE, ResponseType::ESTIMATED_SIZE); \ | ||||
|   } | ||||
|  | ||||
| class ListEntitiesIterator : public ComponentIterator { | ||||
|   | ||||
| @@ -363,11 +363,11 @@ class ProtoService { | ||||
|    * @return A ProtoWriteBuffer object with the reserved size. | ||||
|    */ | ||||
|   virtual ProtoWriteBuffer create_buffer(uint32_t reserve_size) = 0; | ||||
|   virtual bool send_buffer(ProtoWriteBuffer buffer, uint16_t message_type) = 0; | ||||
|   virtual bool send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) = 0; | ||||
|   virtual void read_message(uint32_t msg_size, uint32_t msg_type, uint8_t *msg_data) = 0; | ||||
|  | ||||
|   // Optimized method that pre-allocates buffer based on message size | ||||
|   bool send_message_(const ProtoMessage &msg, uint16_t message_type) { | ||||
|   bool send_message_(const ProtoMessage &msg, uint8_t message_type) { | ||||
|     uint32_t msg_size = 0; | ||||
|     msg.calculate_size(msg_size); | ||||
|  | ||||
|   | ||||
| @@ -2,6 +2,7 @@ | ||||
|  | ||||
| CODEOWNERS = ["@esphome/core"] | ||||
|  | ||||
| CONF_BYTE_ORDER = "byte_order" | ||||
| CONF_DRAW_ROUNDING = "draw_rounding" | ||||
| CONF_ON_STATE_CHANGE = "on_state_change" | ||||
| CONF_REQUEST_HEADERS = "request_headers" | ||||
|   | ||||
| @@ -53,6 +53,7 @@ void DebugComponent::on_shutdown() { | ||||
|   auto pref = global_preferences->make_preference(REBOOT_MAX_LEN, fnv1_hash(REBOOT_KEY + App.get_name())); | ||||
|   if (component != nullptr) { | ||||
|     strncpy(buffer, component->get_component_source(), REBOOT_MAX_LEN - 1); | ||||
|     buffer[REBOOT_MAX_LEN - 1] = '\0'; | ||||
|   } | ||||
|   ESP_LOGD(TAG, "Storing reboot source: %s", buffer); | ||||
|   pref.save(&buffer); | ||||
| @@ -68,6 +69,7 @@ std::string DebugComponent::get_reset_reason_() { | ||||
|       auto pref = global_preferences->make_preference(REBOOT_MAX_LEN, fnv1_hash(REBOOT_KEY + App.get_name())); | ||||
|       char buffer[REBOOT_MAX_LEN]{}; | ||||
|       if (pref.load(&buffer)) { | ||||
|         buffer[REBOOT_MAX_LEN - 1] = '\0'; | ||||
|         reset_reason = "Reboot request from " + std::string(buffer); | ||||
|       } | ||||
|     } | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| from esphome import automation, pins | ||||
| import esphome.codegen as cg | ||||
| from esphome.components import time | ||||
| from esphome.components import esp32, time | ||||
| from esphome.components.esp32 import get_esp32_variant | ||||
| from esphome.components.esp32.const import ( | ||||
|     VARIANT_ESP32, | ||||
| @@ -116,12 +116,20 @@ def validate_pin_number(value): | ||||
|     return value | ||||
|  | ||||
|  | ||||
| def validate_config(config): | ||||
|     if get_esp32_variant() == VARIANT_ESP32C3 and CONF_ESP32_EXT1_WAKEUP in config: | ||||
|         raise cv.Invalid("ESP32-C3 does not support wakeup from touch.") | ||||
|     if get_esp32_variant() == VARIANT_ESP32C3 and CONF_TOUCH_WAKEUP in config: | ||||
|         raise cv.Invalid("ESP32-C3 does not support wakeup from ext1") | ||||
|     return config | ||||
| def _validate_ex1_wakeup_mode(value): | ||||
|     if value == "ALL_LOW": | ||||
|         esp32.only_on_variant(supported=[VARIANT_ESP32], msg_prefix="ALL_LOW")(value) | ||||
|     if value == "ANY_LOW": | ||||
|         esp32.only_on_variant( | ||||
|             supported=[ | ||||
|                 VARIANT_ESP32S2, | ||||
|                 VARIANT_ESP32S3, | ||||
|                 VARIANT_ESP32C6, | ||||
|                 VARIANT_ESP32H2, | ||||
|             ], | ||||
|             msg_prefix="ANY_LOW", | ||||
|         )(value) | ||||
|     return value | ||||
|  | ||||
|  | ||||
| deep_sleep_ns = cg.esphome_ns.namespace("deep_sleep") | ||||
| @@ -148,6 +156,7 @@ WAKEUP_PIN_MODES = { | ||||
| esp_sleep_ext1_wakeup_mode_t = cg.global_ns.enum("esp_sleep_ext1_wakeup_mode_t") | ||||
| Ext1Wakeup = deep_sleep_ns.struct("Ext1Wakeup") | ||||
| EXT1_WAKEUP_MODES = { | ||||
|     "ANY_LOW": esp_sleep_ext1_wakeup_mode_t.ESP_EXT1_WAKEUP_ANY_LOW, | ||||
|     "ALL_LOW": esp_sleep_ext1_wakeup_mode_t.ESP_EXT1_WAKEUP_ALL_LOW, | ||||
|     "ANY_HIGH": esp_sleep_ext1_wakeup_mode_t.ESP_EXT1_WAKEUP_ANY_HIGH, | ||||
| } | ||||
| @@ -187,16 +196,28 @@ CONFIG_SCHEMA = cv.All( | ||||
|             ), | ||||
|             cv.Optional(CONF_ESP32_EXT1_WAKEUP): cv.All( | ||||
|                 cv.only_on_esp32, | ||||
|                 esp32.only_on_variant( | ||||
|                     unsupported=[VARIANT_ESP32C3], msg_prefix="Wakeup from ext1" | ||||
|                 ), | ||||
|                 cv.Schema( | ||||
|                     { | ||||
|                         cv.Required(CONF_PINS): cv.ensure_list( | ||||
|                             pins.internal_gpio_input_pin_schema, validate_pin_number | ||||
|                         ), | ||||
|                         cv.Required(CONF_MODE): cv.enum(EXT1_WAKEUP_MODES, upper=True), | ||||
|                         cv.Required(CONF_MODE): cv.All( | ||||
|                             cv.enum(EXT1_WAKEUP_MODES, upper=True), | ||||
|                             _validate_ex1_wakeup_mode, | ||||
|                         ), | ||||
|                     } | ||||
|                 ), | ||||
|             ), | ||||
|             cv.Optional(CONF_TOUCH_WAKEUP): cv.All(cv.only_on_esp32, cv.boolean), | ||||
|             cv.Optional(CONF_TOUCH_WAKEUP): cv.All( | ||||
|                 cv.only_on_esp32, | ||||
|                 esp32.only_on_variant( | ||||
|                     unsupported=[VARIANT_ESP32C3], msg_prefix="Wakeup from touch" | ||||
|                 ), | ||||
|                 cv.boolean, | ||||
|             ), | ||||
|         } | ||||
|     ).extend(cv.COMPONENT_SCHEMA), | ||||
|     cv.only_on([PLATFORM_ESP32, PLATFORM_ESP8266]), | ||||
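For illustration, a hypothetical deep_sleep configuration (expressed as the Python dict the validator receives; the pin numbers are made up) that exercises the new ANY_LOW mode, which _validate_ex1_wakeup_mode above accepts only on the S2/S3/C6/H2 variants:

# Hypothetical config, not taken from this change; pin numbers are illustrative.
deep_sleep_example = {
    "deep_sleep": {
        "esp32_ext1_wakeup": {
            "pins": [{"number": 2}, {"number": 4}],
            "mode": "ANY_LOW",  # rejected on the original ESP32 and on the C3
        },
    },
}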
|   | ||||
| @@ -189,7 +189,7 @@ def get_download_types(storage_json): | ||||
|     ] | ||||
|  | ||||
|  | ||||
| def only_on_variant(*, supported=None, unsupported=None): | ||||
| def only_on_variant(*, supported=None, unsupported=None, msg_prefix="This feature"): | ||||
|     """Config validator for features only available on some ESP32 variants.""" | ||||
|     if supported is not None and not isinstance(supported, list): | ||||
|         supported = [supported] | ||||
| @@ -200,11 +200,11 @@ def only_on_variant(*, supported=None, unsupported=None): | ||||
|         variant = get_esp32_variant() | ||||
|         if supported is not None and variant not in supported: | ||||
|             raise cv.Invalid( | ||||
|                 f"This feature is only available on {', '.join(supported)}" | ||||
|                 f"{msg_prefix} is only available on {', '.join(supported)}" | ||||
|             ) | ||||
|         if unsupported is not None and variant in unsupported: | ||||
|             raise cv.Invalid( | ||||
|                 f"This feature is not available on {', '.join(unsupported)}" | ||||
|                 f"{msg_prefix} is not available on {', '.join(unsupported)}" | ||||
|             ) | ||||
|         return obj | ||||
|  | ||||
| @@ -707,6 +707,7 @@ async def to_code(config): | ||||
|     cg.add_define("ESPHOME_VARIANT", VARIANT_FRIENDLY[config[CONF_VARIANT]]) | ||||
|  | ||||
|     cg.add_platformio_option("lib_ldf_mode", "off") | ||||
|     cg.add_platformio_option("lib_compat_mode", "strict") | ||||
|  | ||||
|     framework_ver: cv.Version = CORE.data[KEY_CORE][KEY_FRAMEWORK_VERSION] | ||||
|  | ||||
|   | ||||
| @@ -114,7 +114,6 @@ void ESP32InternalGPIOPin::setup() { | ||||
|   if (flags_ & gpio::FLAG_OUTPUT) { | ||||
|     gpio_set_drive_capability(pin_, drive_strength_); | ||||
|   } | ||||
|   ESP_LOGD(TAG, "rtc: %d", SOC_GPIO_SUPPORT_RTC_INDEPENDENT); | ||||
| } | ||||
|  | ||||
| void ESP32InternalGPIOPin::pin_mode(gpio::Flags flags) { | ||||
|   | ||||
| @@ -308,7 +308,7 @@ async def to_code(config): | ||||
|     cg.add(var.set_frame_buffer_count(config[CONF_FRAME_BUFFER_COUNT])) | ||||
|     cg.add(var.set_frame_size(config[CONF_RESOLUTION])) | ||||
|  | ||||
|     cg.add_define("USE_ESP32_CAMERA") | ||||
|     cg.add_define("USE_CAMERA") | ||||
|  | ||||
|     if CORE.using_esp_idf: | ||||
|         add_idf_component(name="espressif/esp32-camera", ref="2.0.15") | ||||
|   | ||||
| @@ -109,6 +109,7 @@ void ESP32TouchComponent::loop() { | ||||
|  | ||||
|       // Only publish if state changed - this filters out repeated events | ||||
|       if (new_state != child->last_state_) { | ||||
|         child->initial_state_published_ = true; | ||||
|         child->last_state_ = new_state; | ||||
|         child->publish_state(new_state); | ||||
|         // Original ESP32: ISR only fires when touched, release is detected by timeout | ||||
| @@ -175,6 +176,9 @@ void ESP32TouchComponent::on_shutdown() { | ||||
| void IRAM_ATTR ESP32TouchComponent::touch_isr_handler(void *arg) { | ||||
|   ESP32TouchComponent *component = static_cast<ESP32TouchComponent *>(arg); | ||||
|  | ||||
|   uint32_t mask = 0; | ||||
|   touch_ll_read_trigger_status_mask(&mask); | ||||
|   touch_ll_clear_trigger_status_mask(); | ||||
|   touch_pad_clear_status(); | ||||
|  | ||||
|   // INTERRUPT BEHAVIOR: On ESP32 v1 hardware, the interrupt fires when ANY configured | ||||
| @@ -184,6 +188,11 @@ void IRAM_ATTR ESP32TouchComponent::touch_isr_handler(void *arg) { | ||||
|   // as any pad remains touched. This allows us to detect both new touches and | ||||
|   // continued touches, but releases must be detected by timeout in the main loop. | ||||
|  | ||||
|   // IMPORTANT: ESP32 v1 touch detection logic - INVERTED compared to v2! | ||||
|   // ESP32 v1: Touch is detected when capacitance INCREASES, causing the measured value to DECREASE | ||||
|   // Therefore: touched = (value < threshold) | ||||
|   // This is opposite to ESP32-S2/S3 v2 where touched = (value > threshold) | ||||
|  | ||||
|   // Process all configured pads to check their current state | ||||
|   // Note: ESP32 v1 doesn't tell us which specific pad triggered the interrupt, | ||||
|   // so we must scan all configured pads to find which ones were touched | ||||
| @@ -201,19 +210,12 @@ void IRAM_ATTR ESP32TouchComponent::touch_isr_handler(void *arg) { | ||||
|       value = touch_ll_read_raw_data(pad); | ||||
|     } | ||||
|  | ||||
|     // Skip pads with 0 value - they haven't been measured in this cycle | ||||
|     // This is important: not all pads are measured every interrupt cycle, | ||||
|     // only those that the hardware has updated | ||||
|     if (value == 0) { | ||||
|     // Skip pads that aren't in the trigger mask | ||||
|     bool is_touched = (mask >> pad) & 1; | ||||
|     if (!is_touched) { | ||||
|       continue; | ||||
|     } | ||||
|  | ||||
|     // IMPORTANT: ESP32 v1 touch detection logic - INVERTED compared to v2! | ||||
|     // ESP32 v1: Touch is detected when capacitance INCREASES, causing the measured value to DECREASE | ||||
|     // Therefore: touched = (value < threshold) | ||||
|     // This is opposite to ESP32-S2/S3 v2 where touched = (value > threshold) | ||||
|     bool is_touched = value < child->get_threshold(); | ||||
|  | ||||
|     // Always send the current state - the main loop will filter for changes | ||||
|     // We send both touched and untouched states because the ISR doesn't | ||||
|     // track previous state (to keep ISR fast and simple) | ||||
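A minimal standalone sketch (Python, purely illustrative and not the component's code) of the new mask-based check: a set bit in the hardware trigger-status mask now marks a touched pad, replacing the old per-pad threshold comparison; on v1 hardware the raw value drops below the threshold when a pad is touched.

def touched_pads(trigger_mask: int, configured_pads: list[int]) -> list[int]:
    # A pad is reported as touched when its bit is set in the trigger-status mask.
    return [pad for pad in configured_pads if (trigger_mask >> pad) & 1]

# Made-up example: pads 0 and 2 triggered, pad 6 did not.
assert touched_pads(0b0000_0101, [0, 2, 6]) == [0, 2]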
|   | ||||
| @@ -180,6 +180,7 @@ async def to_code(config): | ||||
|     cg.add(esp8266_ns.setup_preferences()) | ||||
|  | ||||
|     cg.add_platformio_option("lib_ldf_mode", "off") | ||||
|     cg.add_platformio_option("lib_compat_mode", "strict") | ||||
|  | ||||
|     cg.add_platformio_option("board", config[CONF_BOARD]) | ||||
|     cg.add_build_flag("-DUSE_ESP8266") | ||||
|   | ||||
							
								
								
									
0 esphome/components/gl_r01_i2c/__init__.py Normal file
68 esphome/components/gl_r01_i2c/gl_r01_i2c.cpp Normal file
									
								
							| @@ -0,0 +1,68 @@ | ||||
| #include "esphome/core/log.h" | ||||
| #include "esphome/core/hal.h" | ||||
| #include "gl_r01_i2c.h" | ||||
|  | ||||
| namespace esphome { | ||||
| namespace gl_r01_i2c { | ||||
|  | ||||
| static const char *const TAG = "gl_r01_i2c"; | ||||
|  | ||||
| // Register definitions from datasheet | ||||
| static const uint8_t REG_VERSION = 0x00; | ||||
| static const uint8_t REG_DISTANCE = 0x02; | ||||
| static const uint8_t REG_TRIGGER = 0x10; | ||||
| static const uint8_t CMD_TRIGGER = 0xB0; | ||||
| static const uint8_t RESTART_CMD1 = 0x5A; | ||||
| static const uint8_t RESTART_CMD2 = 0xA5; | ||||
| static const uint8_t READ_DELAY = 40;  // minimum milliseconds from datasheet to safely read measurement result | ||||
|  | ||||
| void GLR01I2CComponent::setup() { | ||||
|   ESP_LOGCONFIG(TAG, "Setting up GL-R01 I2C..."); | ||||
|   // Verify sensor presence | ||||
|   if (!this->read_byte_16(REG_VERSION, &this->version_)) { | ||||
|     ESP_LOGE(TAG, "Failed to communicate with GL-R01 I2C sensor!"); | ||||
|     this->mark_failed(); | ||||
|     return; | ||||
|   } | ||||
|   ESP_LOGD(TAG, "Found GL-R01 I2C with version 0x%04X", this->version_); | ||||
| } | ||||
|  | ||||
| void GLR01I2CComponent::dump_config() { | ||||
|   ESP_LOGCONFIG(TAG, "GL-R01 I2C:"); | ||||
|   ESP_LOGCONFIG(TAG, " Firmware Version: 0x%04X", this->version_); | ||||
|   LOG_I2C_DEVICE(this); | ||||
|   LOG_SENSOR(" ", "Distance", this); | ||||
| } | ||||
|  | ||||
| void GLR01I2CComponent::update() { | ||||
|   // Trigger a new measurement | ||||
|   if (!this->write_byte(REG_TRIGGER, CMD_TRIGGER)) { | ||||
|     ESP_LOGE(TAG, "Failed to trigger measurement!"); | ||||
|     this->status_set_warning(); | ||||
|     return; | ||||
|   } | ||||
|  | ||||
|   // Schedule reading the result after the read delay | ||||
|   this->set_timeout(READ_DELAY, [this]() { this->read_distance_(); }); | ||||
| } | ||||
|  | ||||
| void GLR01I2CComponent::read_distance_() { | ||||
|   uint16_t distance = 0; | ||||
|   if (!this->read_byte_16(REG_DISTANCE, &distance)) { | ||||
|     ESP_LOGE(TAG, "Failed to read distance value!"); | ||||
|     this->status_set_warning(); | ||||
|     return; | ||||
|   } | ||||
|  | ||||
|   if (distance == 0xFFFF) { | ||||
|     ESP_LOGW(TAG, "Invalid measurement received!"); | ||||
|     this->status_set_warning(); | ||||
|   } else { | ||||
|     ESP_LOGV(TAG, "Distance: %umm", distance); | ||||
|     this->publish_state(distance); | ||||
|     this->status_clear_warning(); | ||||
|   } | ||||
| } | ||||
|  | ||||
| }  // namespace gl_r01_i2c | ||||
| }  // namespace esphome | ||||
							
								
								
									
22 esphome/components/gl_r01_i2c/gl_r01_i2c.h Normal file
									
								
							| @@ -0,0 +1,22 @@ | ||||
| #pragma once | ||||
|  | ||||
| #include "esphome/core/component.h" | ||||
| #include "esphome/components/sensor/sensor.h" | ||||
| #include "esphome/components/i2c/i2c.h" | ||||
|  | ||||
| namespace esphome { | ||||
| namespace gl_r01_i2c { | ||||
|  | ||||
| class GLR01I2CComponent : public sensor::Sensor, public i2c::I2CDevice, public PollingComponent { | ||||
|  public: | ||||
|   void setup() override; | ||||
|   void dump_config() override; | ||||
|   void update() override; | ||||
|  | ||||
|  protected: | ||||
|   void read_distance_(); | ||||
|   uint16_t version_{0}; | ||||
| }; | ||||
|  | ||||
| }  // namespace gl_r01_i2c | ||||
| }  // namespace esphome | ||||
							
								
								
									
36 esphome/components/gl_r01_i2c/sensor.py Normal file
									
								
							| @@ -0,0 +1,36 @@ | ||||
| import esphome.codegen as cg | ||||
| import esphome.config_validation as cv | ||||
| from esphome.components import i2c, sensor | ||||
| from esphome.const import ( | ||||
|     CONF_ID, | ||||
|     DEVICE_CLASS_DISTANCE, | ||||
|     STATE_CLASS_MEASUREMENT, | ||||
|     UNIT_MILLIMETER, | ||||
| ) | ||||
|  | ||||
| CODEOWNERS = ["@pkejval"] | ||||
| DEPENDENCIES = ["i2c"] | ||||
|  | ||||
| gl_r01_i2c_ns = cg.esphome_ns.namespace("gl_r01_i2c") | ||||
| GLR01I2CComponent = gl_r01_i2c_ns.class_( | ||||
|     "GLR01I2CComponent", i2c.I2CDevice, cg.PollingComponent | ||||
| ) | ||||
|  | ||||
| CONFIG_SCHEMA = ( | ||||
|     sensor.sensor_schema( | ||||
|         GLR01I2CComponent, | ||||
|         unit_of_measurement=UNIT_MILLIMETER, | ||||
|         accuracy_decimals=0, | ||||
|         device_class=DEVICE_CLASS_DISTANCE, | ||||
|         state_class=STATE_CLASS_MEASUREMENT, | ||||
|     ) | ||||
|     .extend(cv.polling_component_schema("60s")) | ||||
|     .extend(i2c.i2c_device_schema(0x74)) | ||||
| ) | ||||
|  | ||||
|  | ||||
| async def to_code(config): | ||||
|     var = cg.new_Pvariable(config[CONF_ID]) | ||||
|     await cg.register_component(var, config) | ||||
|     await sensor.register_sensor(var, config) | ||||
|     await i2c.register_i2c_device(var, config) | ||||
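A hypothetical YAML entry for the new platform, shown as the equivalent Python dict the schema would validate; the name and update interval are examples, and 0x74 is already the default address from i2c_device_schema above.

gl_r01_i2c_example = {
    "sensor": [
        {
            "platform": "gl_r01_i2c",
            "name": "Tank distance",   # example name
            "update_interval": "60s",  # matches the schema default
        }
    ]
}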
| @@ -45,3 +45,4 @@ async def to_code(config): | ||||
|     cg.add_define("ESPHOME_BOARD", "host") | ||||
|     cg.add_platformio_option("platform", "platformio/native") | ||||
|     cg.add_platformio_option("lib_ldf_mode", "off") | ||||
|     cg.add_platformio_option("lib_compat_mode", "strict") | ||||
|   | ||||
| @@ -111,8 +111,8 @@ CONFIG_SCHEMA = cv.All( | ||||
|             cv.Optional(CONF_MOISTURE): sensor.sensor_schema( | ||||
|                 unit_of_measurement=UNIT_INTENSITY, | ||||
|                 accuracy_decimals=0, | ||||
|                 device_class=DEVICE_CLASS_PRECIPITATION_INTENSITY, | ||||
|                 state_class=STATE_CLASS_MEASUREMENT, | ||||
|                 icon="mdi:weather-rainy", | ||||
|             ), | ||||
|             cv.Optional(CONF_TEMPERATURE): sensor.sensor_schema( | ||||
|                 unit_of_measurement=UNIT_CELSIUS, | ||||
|   | ||||
| @@ -10,8 +10,10 @@ from PIL import Image, UnidentifiedImageError | ||||
|  | ||||
| from esphome import core, external_files | ||||
| import esphome.codegen as cg | ||||
| from esphome.components.const import CONF_BYTE_ORDER | ||||
| import esphome.config_validation as cv | ||||
| from esphome.const import ( | ||||
|     CONF_DEFAULTS, | ||||
|     CONF_DITHER, | ||||
|     CONF_FILE, | ||||
|     CONF_ICON, | ||||
| @@ -38,6 +40,7 @@ CONF_OPAQUE = "opaque" | ||||
| CONF_CHROMA_KEY = "chroma_key" | ||||
| CONF_ALPHA_CHANNEL = "alpha_channel" | ||||
| CONF_INVERT_ALPHA = "invert_alpha" | ||||
| CONF_IMAGES = "images" | ||||
|  | ||||
| TRANSPARENCY_TYPES = ( | ||||
|     CONF_OPAQUE, | ||||
| @@ -188,6 +191,10 @@ class ImageRGB565(ImageEncoder): | ||||
|             dither, | ||||
|             invert_alpha, | ||||
|         ) | ||||
|         self.big_endian = True | ||||
|  | ||||
|     def set_big_endian(self, big_endian: bool) -> None: | ||||
|         self.big_endian = big_endian | ||||
|  | ||||
|     def convert(self, image, path): | ||||
|         return image.convert("RGBA") | ||||
| @@ -205,10 +212,16 @@ class ImageRGB565(ImageEncoder): | ||||
|                 g = 1 | ||||
|                 b = 0 | ||||
|         rgb = (r << 11) | (g << 5) | b | ||||
|         self.data[self.index] = rgb >> 8 | ||||
|         self.index += 1 | ||||
|         self.data[self.index] = rgb & 0xFF | ||||
|         self.index += 1 | ||||
|         if self.big_endian: | ||||
|             self.data[self.index] = rgb >> 8 | ||||
|             self.index += 1 | ||||
|             self.data[self.index] = rgb & 0xFF | ||||
|             self.index += 1 | ||||
|         else: | ||||
|             self.data[self.index] = rgb & 0xFF | ||||
|             self.index += 1 | ||||
|             self.data[self.index] = rgb >> 8 | ||||
|             self.index += 1 | ||||
|         if self.transparency == CONF_ALPHA_CHANNEL: | ||||
|             if self.invert_alpha: | ||||
|                 a ^= 0xFF | ||||
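A small illustrative sketch (not part of the diff) of the packing done above, with the new byte-order option deciding which byte of the 16-bit RGB565 value is written first; unlike the encoder above, which receives already-reduced channel values, this helper starts from 8-bit channels.

def pack_rgb565(r8: int, g8: int, b8: int, big_endian: bool = True) -> bytes:
    # Reduce 8-bit channels to 5/6/5 bits, then pack into one 16-bit value.
    rgb = ((r8 >> 3) << 11) | ((g8 >> 2) << 5) | (b8 >> 3)
    hi, lo = rgb >> 8, rgb & 0xFF
    return bytes([hi, lo] if big_endian else [lo, hi])

assert pack_rgb565(255, 0, 0, big_endian=True) == b"\xf8\x00"   # previous (and default) behaviour
assert pack_rgb565(255, 0, 0, big_endian=False) == b"\x00\xf8"  # byte_order: LITTLE_ENDIAN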
| @@ -364,7 +377,7 @@ def validate_file_shorthand(value): | ||||
|     value = cv.string_strict(value) | ||||
|     parts = value.strip().split(":") | ||||
|     if len(parts) == 2 and parts[0] in MDI_SOURCES: | ||||
|         match = re.match(r"[a-zA-Z0-9\-]+", parts[1]) | ||||
|         match = re.match(r"^[a-zA-Z0-9\-]+$", parts[1]) | ||||
|         if match is None: | ||||
|             raise cv.Invalid(f"Could not parse mdi icon name from '{value}'.") | ||||
|         return download_gh_svg(parts[1], parts[0]) | ||||
| @@ -434,20 +447,29 @@ def validate_type(image_types): | ||||
|  | ||||
|  | ||||
| def validate_settings(value): | ||||
|     type = value[CONF_TYPE] | ||||
|     """ | ||||
|     Validate the settings for a single image configuration. | ||||
|     """ | ||||
|     conf_type = value[CONF_TYPE] | ||||
|     type_class = IMAGE_TYPE[conf_type] | ||||
|     transparency = value[CONF_TRANSPARENCY].lower() | ||||
|     allow_config = IMAGE_TYPE[type].allow_config | ||||
|     if transparency not in allow_config: | ||||
|     if transparency not in type_class.allow_config: | ||||
|         raise cv.Invalid( | ||||
|             f"Image format '{type}' cannot have transparency: {transparency}" | ||||
|             f"Image format '{conf_type}' cannot have transparency: {transparency}" | ||||
|         ) | ||||
|     invert_alpha = value.get(CONF_INVERT_ALPHA, False) | ||||
|     if ( | ||||
|         invert_alpha | ||||
|         and transparency != CONF_ALPHA_CHANNEL | ||||
|         and CONF_INVERT_ALPHA not in allow_config | ||||
|         and CONF_INVERT_ALPHA not in type_class.allow_config | ||||
|     ): | ||||
|         raise cv.Invalid("No alpha channel to invert") | ||||
|     if value.get(CONF_BYTE_ORDER) is not None and not callable( | ||||
|         getattr(type_class, "set_big_endian", None) | ||||
|     ): | ||||
|         raise cv.Invalid( | ||||
|             f"Image format '{conf_type}' does not support byte order configuration" | ||||
|         ) | ||||
|     if file := value.get(CONF_FILE): | ||||
|         file = Path(file) | ||||
|         if is_svg_file(file): | ||||
| @@ -456,31 +478,82 @@ def validate_settings(value): | ||||
|             try: | ||||
|                 Image.open(file) | ||||
|             except UnidentifiedImageError as exc: | ||||
|                 raise cv.Invalid(f"File can't be opened as image: {file}") from exc | ||||
|                 raise cv.Invalid( | ||||
|                     f"File can't be opened as image: {file.absolute()}" | ||||
|                 ) from exc | ||||
|     return value | ||||
|  | ||||
|  | ||||
| IMAGE_ID_SCHEMA = { | ||||
|     cv.Required(CONF_ID): cv.declare_id(Image_), | ||||
|     cv.Required(CONF_FILE): cv.Any(validate_file_shorthand, TYPED_FILE_SCHEMA), | ||||
|     cv.GenerateID(CONF_RAW_DATA_ID): cv.declare_id(cg.uint8), | ||||
| } | ||||
|  | ||||
|  | ||||
| OPTIONS_SCHEMA = { | ||||
|     cv.Optional(CONF_RESIZE): cv.dimensions, | ||||
|     cv.Optional(CONF_DITHER, default="NONE"): cv.one_of( | ||||
|         "NONE", "FLOYDSTEINBERG", upper=True | ||||
|     ), | ||||
|     cv.Optional(CONF_INVERT_ALPHA, default=False): cv.boolean, | ||||
|     cv.Optional(CONF_BYTE_ORDER): cv.one_of("BIG_ENDIAN", "LITTLE_ENDIAN", upper=True), | ||||
|     cv.Optional(CONF_TRANSPARENCY, default=CONF_OPAQUE): validate_transparency(), | ||||
|     cv.Optional(CONF_TYPE): validate_type(IMAGE_TYPE), | ||||
| } | ||||
|  | ||||
| OPTIONS = [key.schema for key in OPTIONS_SCHEMA] | ||||
|  | ||||
| # image schema with no defaults, used with `CONF_IMAGES` in the config | ||||
| IMAGE_SCHEMA_NO_DEFAULTS = { | ||||
|     **IMAGE_ID_SCHEMA, | ||||
|     **{cv.Optional(key): OPTIONS_SCHEMA[key] for key in OPTIONS}, | ||||
| } | ||||
|  | ||||
| BASE_SCHEMA = cv.Schema( | ||||
|     { | ||||
|         cv.Required(CONF_ID): cv.declare_id(Image_), | ||||
|         cv.Required(CONF_FILE): cv.Any(validate_file_shorthand, TYPED_FILE_SCHEMA), | ||||
|         cv.Optional(CONF_RESIZE): cv.dimensions, | ||||
|         cv.Optional(CONF_DITHER, default="NONE"): cv.one_of( | ||||
|             "NONE", "FLOYDSTEINBERG", upper=True | ||||
|         ), | ||||
|         cv.Optional(CONF_INVERT_ALPHA, default=False): cv.boolean, | ||||
|         cv.GenerateID(CONF_RAW_DATA_ID): cv.declare_id(cg.uint8), | ||||
|         **IMAGE_ID_SCHEMA, | ||||
|         **OPTIONS_SCHEMA, | ||||
|     } | ||||
| ).add_extra(validate_settings) | ||||
|  | ||||
| IMAGE_SCHEMA = BASE_SCHEMA.extend( | ||||
|     { | ||||
|         cv.Required(CONF_TYPE): validate_type(IMAGE_TYPE), | ||||
|         cv.Optional(CONF_TRANSPARENCY, default=CONF_OPAQUE): validate_transparency(), | ||||
|     } | ||||
| ) | ||||
|  | ||||
|  | ||||
| def validate_defaults(value): | ||||
|     """ | ||||
|     Validate the options for images with defaults | ||||
|     """ | ||||
|     defaults = value[CONF_DEFAULTS] | ||||
|     result = [] | ||||
|     for index, image in enumerate(value[CONF_IMAGES]): | ||||
|         type = image.get(CONF_TYPE, defaults.get(CONF_TYPE)) | ||||
|         if type is None: | ||||
|             raise cv.Invalid( | ||||
|                 "Type is required either in the image config or in the defaults", | ||||
|                 path=[CONF_IMAGES, index], | ||||
|             ) | ||||
|         type_class = IMAGE_TYPE[type] | ||||
|         # A default byte order should be simply ignored if the type does not support it | ||||
|         available_options = [*OPTIONS] | ||||
|         if ( | ||||
|             not callable(getattr(type_class, "set_big_endian", None)) | ||||
|             and CONF_BYTE_ORDER not in image | ||||
|         ): | ||||
|             available_options.remove(CONF_BYTE_ORDER) | ||||
|         config = { | ||||
|             **{key: image.get(key, defaults.get(key)) for key in available_options}, | ||||
|             **{key.schema: image[key.schema] for key in IMAGE_ID_SCHEMA}, | ||||
|         } | ||||
|         validate_settings(config) | ||||
|         result.append(config) | ||||
|     return result | ||||
|  | ||||
|  | ||||
| def typed_image_schema(image_type): | ||||
|     """ | ||||
|     Construct a schema for a specific image type, allowing transparency options | ||||
| @@ -523,10 +596,33 @@ def typed_image_schema(image_type): | ||||
|  | ||||
| # The config schema can be a (possibly empty) single list of images, | ||||
| # or a dictionary of image types each with a list of images | ||||
| CONFIG_SCHEMA = cv.Any( | ||||
|     cv.Schema({cv.Optional(t.lower()): typed_image_schema(t) for t in IMAGE_TYPE}), | ||||
|     cv.ensure_list(IMAGE_SCHEMA), | ||||
| ) | ||||
| # or a dictionary with keys `defaults:` and `images:` | ||||
|  | ||||
|  | ||||
| def _config_schema(config): | ||||
|     if isinstance(config, list): | ||||
|         return cv.Schema([IMAGE_SCHEMA])(config) | ||||
|     if not isinstance(config, dict): | ||||
|         raise cv.Invalid( | ||||
|             "Badly formed image configuration, expected a list or a dictionary" | ||||
|         ) | ||||
|     if CONF_DEFAULTS in config or CONF_IMAGES in config: | ||||
|         return validate_defaults( | ||||
|             cv.Schema( | ||||
|                 { | ||||
|                     cv.Required(CONF_DEFAULTS): OPTIONS_SCHEMA, | ||||
|                     cv.Required(CONF_IMAGES): cv.ensure_list(IMAGE_SCHEMA_NO_DEFAULTS), | ||||
|                 } | ||||
|             )(config) | ||||
|         ) | ||||
|     if CONF_ID in config or CONF_FILE in config: | ||||
|         return cv.ensure_list(IMAGE_SCHEMA)([config]) | ||||
|     return cv.Schema( | ||||
|         {cv.Optional(t.lower()): typed_image_schema(t) for t in IMAGE_TYPE} | ||||
|     )(config) | ||||
|  | ||||
|  | ||||
| CONFIG_SCHEMA = _config_schema | ||||
|  | ||||
|  | ||||
| async def write_image(config, all_frames=False): | ||||
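For illustration, the new dictionary layout accepted by _config_schema above might look like this (as the Python-dict equivalent of the YAML; the ids and file names are hypothetical). Shared options live under defaults and can be overridden per image; a default byte_order is silently dropped for types without set_big_endian, as validate_defaults does above.

images_config_example = {
    "defaults": {
        "type": "RGB565",
        "byte_order": "LITTLE_ENDIAN",
        "transparency": "opaque",
    },
    "images": [
        {"id": "boot_logo", "file": "images/logo.png"},
        {"id": "wifi_icon", "file": "mdi:wifi", "type": "BINARY"},  # default byte_order ignored here
    ],
}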
| @@ -585,6 +681,9 @@ async def write_image(config, all_frames=False): | ||||
|  | ||||
|     total_rows = height * frame_count | ||||
|     encoder = IMAGE_TYPE[type](width, total_rows, transparency, dither, invert_alpha) | ||||
|     if byte_order := config.get(CONF_BYTE_ORDER): | ||||
|         # The check for a valid type has already been done in validate_settings | ||||
|         encoder.set_big_endian(byte_order == "BIG_ENDIAN") | ||||
|     for frame_index in range(frame_count): | ||||
|         image.seek(frame_index) | ||||
|         pixels = encoder.convert(image.resize((width, height)), path).getdata() | ||||
|   | ||||
| @@ -268,6 +268,7 @@ async def component_to_code(config): | ||||
|  | ||||
|     # disable library compatibility checks | ||||
|     cg.add_platformio_option("lib_ldf_mode", "off") | ||||
|     cg.add_platformio_option("lib_compat_mode", "strict") | ||||
|     # include <Arduino.h> in every file | ||||
|     cg.add_platformio_option("build_src_flags", "-include Arduino.h") | ||||
|     # dummy version code | ||||
|   | ||||
							
								
								
									
0 esphome/components/lps22/__init__.py Normal file
75 esphome/components/lps22/lps22.cpp Normal file
									
								
							| @@ -0,0 +1,75 @@ | ||||
| #include "lps22.h" | ||||
|  | ||||
| namespace esphome { | ||||
| namespace lps22 { | ||||
|  | ||||
| static constexpr const char *const TAG = "lps22"; | ||||
|  | ||||
| static constexpr uint8_t WHO_AM_I = 0x0F; | ||||
| static constexpr uint8_t LPS22HB_ID = 0xB1; | ||||
| static constexpr uint8_t LPS22HH_ID = 0xB3; | ||||
| static constexpr uint8_t CTRL_REG2 = 0x11; | ||||
| static constexpr uint8_t CTRL_REG2_ONE_SHOT_MASK = 0b1; | ||||
| static constexpr uint8_t STATUS = 0x27; | ||||
| static constexpr uint8_t STATUS_T_DA_MASK = 0b10; | ||||
| static constexpr uint8_t STATUS_P_DA_MASK = 0b01; | ||||
| static constexpr uint8_t TEMP_L = 0x2b; | ||||
| static constexpr uint8_t PRES_OUT_XL = 0x28; | ||||
| static constexpr uint8_t REF_P_XL = 0x28; | ||||
| static constexpr uint8_t READ_ATTEMPTS = 10; | ||||
| static constexpr uint8_t READ_INTERVAL = 5; | ||||
| static constexpr float PRESSURE_SCALE = 1.0f / 4096.0f; | ||||
| static constexpr float TEMPERATURE_SCALE = 0.01f; | ||||
|  | ||||
| void LPS22Component::setup() { | ||||
|   uint8_t value = 0x00; | ||||
|   this->read_register(WHO_AM_I, &value, 1); | ||||
|   if (value != LPS22HB_ID && value != LPS22HH_ID) { | ||||
|     ESP_LOGW(TAG, "device IDs as %02x, which isn't a known LPS22HB or LPS22HH ID", value); | ||||
|     this->mark_failed(); | ||||
|   } | ||||
| } | ||||
|  | ||||
| void LPS22Component::dump_config() { | ||||
|   ESP_LOGCONFIG(TAG, "LPS22:"); | ||||
|   LOG_SENSOR("  ", "Temperature", this->temperature_sensor_); | ||||
|   LOG_SENSOR("  ", "Pressure", this->pressure_sensor_); | ||||
|   LOG_I2C_DEVICE(this); | ||||
|   LOG_UPDATE_INTERVAL(this); | ||||
| } | ||||
|  | ||||
| void LPS22Component::update() { | ||||
|   uint8_t value = 0x00; | ||||
|   this->read_register(CTRL_REG2, &value, 1); | ||||
|   value |= CTRL_REG2_ONE_SHOT_MASK; | ||||
|   this->write_register(CTRL_REG2, &value, 1); | ||||
|   this->set_retry(READ_INTERVAL, READ_ATTEMPTS, [this](uint8_t _) { return this->try_read_(); }); | ||||
| } | ||||
|  | ||||
| RetryResult LPS22Component::try_read_() { | ||||
|   uint8_t value = 0x00; | ||||
|   this->read_register(STATUS, &value, 1); | ||||
|   const uint8_t expected_status_mask = STATUS_T_DA_MASK | STATUS_P_DA_MASK; | ||||
|   if ((value & expected_status_mask) != expected_status_mask) { | ||||
|     ESP_LOGD(TAG, "STATUS not ready: %x", value); | ||||
|     return RetryResult::RETRY; | ||||
|   } | ||||
|  | ||||
|   if (this->temperature_sensor_ != nullptr) { | ||||
|     uint8_t t_buf[2]{0}; | ||||
|     this->read_register(TEMP_L, t_buf, 2); | ||||
|     int16_t encoded = static_cast<int16_t>(encode_uint16(t_buf[1], t_buf[0])); | ||||
|     float temp = TEMPERATURE_SCALE * static_cast<float>(encoded); | ||||
|     this->temperature_sensor_->publish_state(temp); | ||||
|   } | ||||
|   if (this->pressure_sensor_ != nullptr) { | ||||
|     uint8_t p_buf[3]{0}; | ||||
|     this->read_register(PRES_OUT_XL, p_buf, 3); | ||||
|     uint32_t p_lsb = encode_uint24(p_buf[2], p_buf[1], p_buf[0]); | ||||
|     this->pressure_sensor_->publish_state(PRESSURE_SCALE * static_cast<float>(p_lsb)); | ||||
|   } | ||||
|   return RetryResult::DONE; | ||||
| } | ||||
|  | ||||
| }  // namespace lps22 | ||||
| }  // namespace esphome | ||||
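As a quick sanity check of the scale constants above (not part of the component; the raw values below are made up): pressure is a 24-bit count in 1/4096 hPa, temperature a signed 16-bit count in 1/100 °C.

def lps22_pressure_hpa(raw24: int) -> float:
    return raw24 / 4096.0              # PRESSURE_SCALE = 1/4096

def lps22_temperature_c(raw16: int) -> float:
    if raw16 >= 0x8000:                # reinterpret as signed 16-bit
        raw16 -= 0x10000
    return raw16 * 0.01                # TEMPERATURE_SCALE = 0.01

assert lps22_pressure_hpa(4151296) == 1013.5          # 4151296 / 4096
assert abs(lps22_temperature_c(2215) - 22.15) < 1e-9  # 22.15 degrees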
							
								
								
									
27 esphome/components/lps22/lps22.h Normal file
									
								
							| @@ -0,0 +1,27 @@ | ||||
| #pragma once | ||||
|  | ||||
| #include "esphome/core/component.h" | ||||
| #include "esphome/components/sensor/sensor.h" | ||||
| #include "esphome/components/i2c/i2c.h" | ||||
|  | ||||
| namespace esphome { | ||||
| namespace lps22 { | ||||
|  | ||||
| class LPS22Component : public sensor::Sensor, public PollingComponent, public i2c::I2CDevice { | ||||
|  public: | ||||
|   void set_temperature_sensor(sensor::Sensor *temperature_sensor) { this->temperature_sensor_ = temperature_sensor; } | ||||
|   void set_pressure_sensor(sensor::Sensor *pressure_sensor) { this->pressure_sensor_ = pressure_sensor; } | ||||
|  | ||||
|   void setup() override; | ||||
|   void update() override; | ||||
|   void dump_config() override; | ||||
|  | ||||
|  protected: | ||||
|   sensor::Sensor *temperature_sensor_{nullptr}; | ||||
|   sensor::Sensor *pressure_sensor_{nullptr}; | ||||
|  | ||||
|   RetryResult try_read_(); | ||||
| }; | ||||
|  | ||||
| }  // namespace lps22 | ||||
| }  // namespace esphome | ||||
							
								
								
									
58 esphome/components/lps22/sensor.py Normal file
									
								
							| @@ -0,0 +1,58 @@ | ||||
| import esphome.codegen as cg | ||||
| import esphome.config_validation as cv | ||||
| from esphome.components import i2c, sensor | ||||
| from esphome.const import ( | ||||
|     CONF_ID, | ||||
|     CONF_TEMPERATURE, | ||||
|     CONF_PRESSURE, | ||||
|     STATE_CLASS_MEASUREMENT, | ||||
|     UNIT_CELSIUS, | ||||
|     UNIT_HECTOPASCAL, | ||||
|     ICON_THERMOMETER, | ||||
|     DEVICE_CLASS_TEMPERATURE, | ||||
|     DEVICE_CLASS_PRESSURE, | ||||
| ) | ||||
|  | ||||
| CODEOWNERS = ["@nagisa"] | ||||
| DEPENDENCIES = ["i2c"] | ||||
|  | ||||
| lps22 = cg.esphome_ns.namespace("lps22") | ||||
|  | ||||
| LPS22Component = lps22.class_("LPS22Component", cg.PollingComponent, i2c.I2CDevice) | ||||
|  | ||||
| CONFIG_SCHEMA = ( | ||||
|     cv.Schema( | ||||
|         { | ||||
|             cv.GenerateID(): cv.declare_id(LPS22Component), | ||||
|             cv.Optional(CONF_TEMPERATURE): sensor.sensor_schema( | ||||
|                 unit_of_measurement=UNIT_CELSIUS, | ||||
|                 icon=ICON_THERMOMETER, | ||||
|                 accuracy_decimals=2, | ||||
|                 device_class=DEVICE_CLASS_TEMPERATURE, | ||||
|                 state_class=STATE_CLASS_MEASUREMENT, | ||||
|             ), | ||||
|             cv.Optional(CONF_PRESSURE): sensor.sensor_schema( | ||||
|                 unit_of_measurement=UNIT_HECTOPASCAL, | ||||
|                 accuracy_decimals=2, | ||||
|                 device_class=DEVICE_CLASS_PRESSURE, | ||||
|                 state_class=STATE_CLASS_MEASUREMENT, | ||||
|             ), | ||||
|         } | ||||
|     ) | ||||
|     .extend(cv.polling_component_schema("60s")) | ||||
|     .extend(i2c.i2c_device_schema(0x5D))  # can also be 0x5C | ||||
| ) | ||||
|  | ||||
|  | ||||
| async def to_code(config): | ||||
|     var = cg.new_Pvariable(config[CONF_ID]) | ||||
|     await cg.register_component(var, config) | ||||
|     await i2c.register_i2c_device(var, config) | ||||
|  | ||||
|     if temperature_config := config.get(CONF_TEMPERATURE): | ||||
|         sens = await sensor.new_sensor(temperature_config) | ||||
|         cg.add(var.set_temperature_sensor(sens)) | ||||
|  | ||||
|     if pressure_config := config.get(CONF_PRESSURE): | ||||
|         sens = await sensor.new_sensor(pressure_config) | ||||
|         cg.add(var.set_pressure_sensor(sens)) | ||||
| @@ -153,11 +153,15 @@ void MQTTBackendESP32::mqtt_event_handler_(const Event &event) { | ||||
|     case MQTT_EVENT_DATA: { | ||||
|       static std::string topic; | ||||
|       if (!event.topic.empty()) { | ||||
|         // When a single message arrives in multiple chunks, event.topic is only set | ||||
|         // on the first chunk and is empty on the rest. Cache the last seen topic so | ||||
|         // handlers always receive the correct topic, as if the underlying library | ||||
|         // delivered it with every chunk. | ||||
|         topic = event.topic; | ||||
|       } | ||||
|       ESP_LOGV(TAG, "MQTT_EVENT_DATA %s", topic.c_str()); | ||||
|       this->on_message_.call(!event.topic.empty() ? topic.c_str() : nullptr, event.data.data(), event.data.size(), | ||||
|                              event.current_data_offset, event.total_data_len); | ||||
|       this->on_message_.call(topic.c_str(), event.data.data(), event.data.size(), event.current_data_offset, | ||||
|                              event.total_data_len); | ||||
|     } break; | ||||
|     case MQTT_EVENT_ERROR: | ||||
|       ESP_LOGE(TAG, "MQTT_EVENT_ERROR"); | ||||
|   | ||||
| @@ -44,7 +44,7 @@ void NextionBinarySensor::set_state(bool state, bool publish, bool send_to_nexti | ||||
|     return; | ||||
|  | ||||
|   if (send_to_nextion) { | ||||
|     if (this->nextion_->is_sleeping() || !this->visible_) { | ||||
|     if (this->nextion_->is_sleeping() || !this->component_flags_.visible) { | ||||
|       this->needs_to_send_update_ = true; | ||||
|     } else { | ||||
|       this->needs_to_send_update_ = false; | ||||
|   | ||||
| @@ -8,8 +8,8 @@ void NextionComponent::set_background_color(Color bco) { | ||||
|     return;  // This is a variable. no need to set color | ||||
|   } | ||||
|   this->bco_ = bco; | ||||
|   this->bco_needs_update_ = true; | ||||
|   this->bco_is_set_ = true; | ||||
|   this->component_flags_.bco_needs_update = true; | ||||
|   this->component_flags_.bco_is_set = true; | ||||
|   this->update_component_settings(); | ||||
| } | ||||
|  | ||||
| @@ -19,8 +19,8 @@ void NextionComponent::set_background_pressed_color(Color bco2) { | ||||
|   } | ||||
|  | ||||
|   this->bco2_ = bco2; | ||||
|   this->bco2_needs_update_ = true; | ||||
|   this->bco2_is_set_ = true; | ||||
|   this->component_flags_.bco2_needs_update = true; | ||||
|   this->component_flags_.bco2_is_set = true; | ||||
|   this->update_component_settings(); | ||||
| } | ||||
|  | ||||
| @@ -29,8 +29,8 @@ void NextionComponent::set_foreground_color(Color pco) { | ||||
|     return;  // This is a variable. no need to set color | ||||
|   } | ||||
|   this->pco_ = pco; | ||||
|   this->pco_needs_update_ = true; | ||||
|   this->pco_is_set_ = true; | ||||
|   this->component_flags_.pco_needs_update = true; | ||||
|   this->component_flags_.pco_is_set = true; | ||||
|   this->update_component_settings(); | ||||
| } | ||||
|  | ||||
| @@ -39,8 +39,8 @@ void NextionComponent::set_foreground_pressed_color(Color pco2) { | ||||
|     return;  // This is a variable. no need to set color | ||||
|   } | ||||
|   this->pco2_ = pco2; | ||||
|   this->pco2_needs_update_ = true; | ||||
|   this->pco2_is_set_ = true; | ||||
|   this->component_flags_.pco2_needs_update = true; | ||||
|   this->component_flags_.pco2_is_set = true; | ||||
|   this->update_component_settings(); | ||||
| } | ||||
|  | ||||
| @@ -49,8 +49,8 @@ void NextionComponent::set_font_id(uint8_t font_id) { | ||||
|     return;  // This is a variable. no need to set color | ||||
|   } | ||||
|   this->font_id_ = font_id; | ||||
|   this->font_id_needs_update_ = true; | ||||
|   this->font_id_is_set_ = true; | ||||
|   this->component_flags_.font_id_needs_update = true; | ||||
|   this->component_flags_.font_id_is_set = true; | ||||
|   this->update_component_settings(); | ||||
| } | ||||
|  | ||||
| @@ -58,20 +58,20 @@ void NextionComponent::set_visible(bool visible) { | ||||
|   if (this->variable_name_ == this->variable_name_to_send_) { | ||||
|     return;  // This is a variable. no need to set color | ||||
|   } | ||||
|   this->visible_ = visible; | ||||
|   this->visible_needs_update_ = true; | ||||
|   this->visible_is_set_ = true; | ||||
|   this->component_flags_.visible = visible; | ||||
|   this->component_flags_.visible_needs_update = true; | ||||
|   this->component_flags_.visible_is_set = true; | ||||
|   this->update_component_settings(); | ||||
| } | ||||
|  | ||||
| void NextionComponent::update_component_settings(bool force_update) { | ||||
|   if (this->nextion_->is_sleeping() || !this->nextion_->is_setup() || !this->visible_is_set_ || | ||||
|       (!this->visible_needs_update_ && !this->visible_)) { | ||||
|   if (this->nextion_->is_sleeping() || !this->nextion_->is_setup() || !this->component_flags_.visible_is_set || | ||||
|       (!this->component_flags_.visible_needs_update && !this->component_flags_.visible)) { | ||||
|     this->needs_to_send_update_ = true; | ||||
|     return; | ||||
|   } | ||||
|  | ||||
|   if (this->visible_needs_update_ || (force_update && this->visible_is_set_)) { | ||||
|   if (this->component_flags_.visible_needs_update || (force_update && this->component_flags_.visible_is_set)) { | ||||
|     std::string name_to_send = this->variable_name_; | ||||
|  | ||||
|     size_t pos = name_to_send.find_last_of('.'); | ||||
| @@ -79,9 +79,9 @@ void NextionComponent::update_component_settings(bool force_update) { | ||||
|       name_to_send = name_to_send.substr(pos + 1); | ||||
|     } | ||||
|  | ||||
|     this->visible_needs_update_ = false; | ||||
|     this->component_flags_.visible_needs_update = false; | ||||
|  | ||||
|     if (this->visible_) { | ||||
|     if (this->component_flags_.visible) { | ||||
|       this->nextion_->show_component(name_to_send.c_str()); | ||||
|       this->send_state_to_nextion(); | ||||
|     } else { | ||||
| @@ -90,26 +90,26 @@ void NextionComponent::update_component_settings(bool force_update) { | ||||
|     } | ||||
|   } | ||||
|  | ||||
|   if (this->bco_needs_update_ || (force_update && this->bco2_is_set_)) { | ||||
|   if (this->component_flags_.bco_needs_update || (force_update && this->component_flags_.bco2_is_set)) { | ||||
|     this->nextion_->set_component_background_color(this->variable_name_.c_str(), this->bco_); | ||||
|     this->bco_needs_update_ = false; | ||||
|     this->component_flags_.bco_needs_update = false; | ||||
|   } | ||||
|   if (this->bco2_needs_update_ || (force_update && this->bco2_is_set_)) { | ||||
|   if (this->component_flags_.bco2_needs_update || (force_update && this->component_flags_.bco2_is_set)) { | ||||
|     this->nextion_->set_component_pressed_background_color(this->variable_name_.c_str(), this->bco2_); | ||||
|     this->bco2_needs_update_ = false; | ||||
|     this->component_flags_.bco2_needs_update = false; | ||||
|   } | ||||
|   if (this->pco_needs_update_ || (force_update && this->pco_is_set_)) { | ||||
|   if (this->component_flags_.pco_needs_update || (force_update && this->component_flags_.pco_is_set)) { | ||||
|     this->nextion_->set_component_foreground_color(this->variable_name_.c_str(), this->pco_); | ||||
|     this->pco_needs_update_ = false; | ||||
|     this->component_flags_.pco_needs_update = false; | ||||
|   } | ||||
|   if (this->pco2_needs_update_ || (force_update && this->pco2_is_set_)) { | ||||
|   if (this->component_flags_.pco2_needs_update || (force_update && this->component_flags_.pco2_is_set)) { | ||||
|     this->nextion_->set_component_pressed_foreground_color(this->variable_name_.c_str(), this->pco2_); | ||||
|     this->pco2_needs_update_ = false; | ||||
|     this->component_flags_.pco2_needs_update = false; | ||||
|   } | ||||
|  | ||||
|   if (this->font_id_needs_update_ || (force_update && this->font_id_is_set_)) { | ||||
|   if (this->component_flags_.font_id_needs_update || (force_update && this->component_flags_.font_id_is_set)) { | ||||
|     this->nextion_->set_component_font(this->variable_name_.c_str(), this->font_id_); | ||||
|     this->font_id_needs_update_ = false; | ||||
|     this->component_flags_.font_id_needs_update = false; | ||||
|   } | ||||
| } | ||||
| }  // namespace nextion | ||||
|   | ||||
| @@ -21,29 +21,64 @@ class NextionComponent : public NextionComponentBase { | ||||
|   void set_visible(bool visible); | ||||
|  | ||||
|  protected: | ||||
|   /** | ||||
|    * @brief Constructor initializes component state with visible=true (default state) | ||||
|    */ | ||||
|   NextionComponent() { | ||||
|     component_flags_ = {};         // Zero-initialize all state | ||||
|     component_flags_.visible = 1;  // Set default visibility to true | ||||
|   } | ||||
|  | ||||
|   NextionBase *nextion_; | ||||
|  | ||||
|   bool bco_needs_update_ = false; | ||||
|   bool bco_is_set_ = false; | ||||
|   Color bco_; | ||||
|   bool bco2_needs_update_ = false; | ||||
|   bool bco2_is_set_ = false; | ||||
|   Color bco2_; | ||||
|   bool pco_needs_update_ = false; | ||||
|   bool pco_is_set_ = false; | ||||
|   Color pco_; | ||||
|   bool pco2_needs_update_ = false; | ||||
|   bool pco2_is_set_ = false; | ||||
|   Color pco2_; | ||||
|   // Color and styling properties | ||||
|   Color bco_;   // Background color | ||||
|   Color bco2_;  // Pressed background color | ||||
|   Color pco_;   // Foreground color | ||||
|   Color pco2_;  // Pressed foreground color | ||||
|   uint8_t font_id_ = 0; | ||||
|   bool font_id_needs_update_ = false; | ||||
|   bool font_id_is_set_ = false; | ||||
|  | ||||
|   bool visible_ = true; | ||||
|   bool visible_needs_update_ = false; | ||||
|   bool visible_is_set_ = false; | ||||
|   /** | ||||
|    * @brief Component state management using compact bitfield structure | ||||
|    * | ||||
|    * Stores all component state flags and properties in a single 16-bit bitfield | ||||
|    * for efficient memory usage and improved cache locality. | ||||
|    * | ||||
|    * Each component property maintains two state flags: | ||||
|    * - needs_update: Indicates the property requires synchronization with the display | ||||
|    * - is_set: Tracks whether the property has been explicitly configured | ||||
|    * | ||||
|    * The visible field stores both the update flags and the actual visibility state. | ||||
|    */ | ||||
|   struct ComponentState { | ||||
|     // Background color flags | ||||
|     uint16_t bco_needs_update : 1; | ||||
|     uint16_t bco_is_set : 1; | ||||
|  | ||||
|   // void send_state_to_nextion() = 0; | ||||
|     // Pressed background color flags | ||||
|     uint16_t bco2_needs_update : 1; | ||||
|     uint16_t bco2_is_set : 1; | ||||
|  | ||||
|     // Foreground color flags | ||||
|     uint16_t pco_needs_update : 1; | ||||
|     uint16_t pco_is_set : 1; | ||||
|  | ||||
|     // Pressed foreground color flags | ||||
|     uint16_t pco2_needs_update : 1; | ||||
|     uint16_t pco2_is_set : 1; | ||||
|  | ||||
|     // Font ID flags | ||||
|     uint16_t font_id_needs_update : 1; | ||||
|     uint16_t font_id_is_set : 1; | ||||
|  | ||||
|     // Visibility flags | ||||
|     uint16_t visible_needs_update : 1; | ||||
|     uint16_t visible_is_set : 1; | ||||
|     uint16_t visible : 1;  // Actual visibility state | ||||
|  | ||||
|     // Reserved bits for future expansion | ||||
|     uint16_t reserved : 3; | ||||
|   } component_flags_; | ||||
| }; | ||||
| }  // namespace nextion | ||||
| }  // namespace esphome | ||||
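A rough Python sketch (illustrative only) of the packing the ComponentState bitfield above performs: thirteen one-bit flags share a single 16-bit word instead of thirteen separate bool members.

FLAGS = [
    "bco_needs_update", "bco_is_set", "bco2_needs_update", "bco2_is_set",
    "pco_needs_update", "pco_is_set", "pco2_needs_update", "pco2_is_set",
    "font_id_needs_update", "font_id_is_set",
    "visible_needs_update", "visible_is_set", "visible",
]
BIT = {name: i for i, name in enumerate(FLAGS)}

def set_flag(word: int, name: str, value: bool) -> int:
    return (word | (1 << BIT[name])) if value else (word & ~(1 << BIT[name]))

flags = set_flag(0, "visible", True)   # the constructor's default state
assert (flags >> BIT["visible"]) & 1 == 1
assert flags < (1 << 16)               # all flags fit in one 16-bit word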
|   | ||||
| @@ -53,7 +53,7 @@ void NextionSensor::set_state(float state, bool publish, bool send_to_nextion) { | ||||
|  | ||||
|   if (this->wave_chan_id_ == UINT8_MAX) { | ||||
|     if (send_to_nextion) { | ||||
|       if (this->nextion_->is_sleeping() || !this->visible_) { | ||||
|       if (this->nextion_->is_sleeping() || !this->component_flags_.visible) { | ||||
|         this->needs_to_send_update_ = true; | ||||
|       } else { | ||||
|         this->needs_to_send_update_ = false; | ||||
|   | ||||
| @@ -28,7 +28,7 @@ void NextionSwitch::set_state(bool state, bool publish, bool send_to_nextion) { | ||||
|     return; | ||||
|  | ||||
|   if (send_to_nextion) { | ||||
|     if (this->nextion_->is_sleeping() || !this->visible_) { | ||||
|     if (this->nextion_->is_sleeping() || !this->component_flags_.visible) { | ||||
|       this->needs_to_send_update_ = true; | ||||
|     } else { | ||||
|       this->needs_to_send_update_ = false; | ||||
|   | ||||
| @@ -26,7 +26,7 @@ void NextionTextSensor::set_state(const std::string &state, bool publish, bool s | ||||
|     return; | ||||
|  | ||||
|   if (send_to_nextion) { | ||||
|     if (this->nextion_->is_sleeping() || !this->visible_) { | ||||
|     if (this->nextion_->is_sleeping() || !this->component_flags_.visible) { | ||||
|       this->needs_to_send_update_ = true; | ||||
|     } else { | ||||
|       this->nextion_->add_no_result_to_queue_with_set(this, state); | ||||
|   | ||||
| @@ -1,5 +1,6 @@ | ||||
| #include "nfc.h" | ||||
| #include <cstdio> | ||||
| #include "esphome/core/helpers.h" | ||||
| #include "esphome/core/log.h" | ||||
|  | ||||
| namespace esphome { | ||||
| @@ -7,29 +8,9 @@ namespace nfc { | ||||
|  | ||||
| static const char *const TAG = "nfc"; | ||||
|  | ||||
| std::string format_uid(std::vector<uint8_t> &uid) { | ||||
|   char buf[(uid.size() * 2) + uid.size() - 1]; | ||||
|   int offset = 0; | ||||
|   for (size_t i = 0; i < uid.size(); i++) { | ||||
|     const char *format = "%02X"; | ||||
|     if (i + 1 < uid.size()) | ||||
|       format = "%02X-"; | ||||
|     offset += sprintf(buf + offset, format, uid[i]); | ||||
|   } | ||||
|   return std::string(buf); | ||||
| } | ||||
| std::string format_uid(const std::vector<uint8_t> &uid) { return format_hex_pretty(uid, '-', false); } | ||||
|  | ||||
| std::string format_bytes(std::vector<uint8_t> &bytes) { | ||||
|   char buf[(bytes.size() * 2) + bytes.size() - 1]; | ||||
|   int offset = 0; | ||||
|   for (size_t i = 0; i < bytes.size(); i++) { | ||||
|     const char *format = "%02X"; | ||||
|     if (i + 1 < bytes.size()) | ||||
|       format = "%02X "; | ||||
|     offset += sprintf(buf + offset, format, bytes[i]); | ||||
|   } | ||||
|   return std::string(buf); | ||||
| } | ||||
| std::string format_bytes(const std::vector<uint8_t> &bytes) { return format_hex_pretty(bytes, ' ', false); } | ||||
|  | ||||
| uint8_t guess_tag_type(uint8_t uid_length) { | ||||
|   if (uid_length == 4) { | ||||
|   | ||||
| @@ -2,8 +2,8 @@ | ||||
|  | ||||
| #include "esphome/core/helpers.h" | ||||
| #include "esphome/core/log.h" | ||||
| #include "ndef_record.h" | ||||
| #include "ndef_message.h" | ||||
| #include "ndef_record.h" | ||||
| #include "nfc_tag.h" | ||||
|  | ||||
| #include <vector> | ||||
| @@ -53,8 +53,8 @@ static const uint8_t DEFAULT_KEY[6] = {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}; | ||||
| static const uint8_t NDEF_KEY[6] = {0xD3, 0xF7, 0xD3, 0xF7, 0xD3, 0xF7}; | ||||
| static const uint8_t MAD_KEY[6] = {0xA0, 0xA1, 0xA2, 0xA3, 0xA4, 0xA5}; | ||||
|  | ||||
| std::string format_uid(std::vector<uint8_t> &uid); | ||||
| std::string format_bytes(std::vector<uint8_t> &bytes); | ||||
| std::string format_uid(const std::vector<uint8_t> &uid); | ||||
| std::string format_bytes(const std::vector<uint8_t> &bytes); | ||||
|  | ||||
| uint8_t guess_tag_type(uint8_t uid_length); | ||||
| uint8_t get_mifare_classic_ndef_start_index(std::vector<uint8_t> &data); | ||||
|   | ||||
| @@ -165,6 +165,7 @@ async def to_code(config): | ||||
|     # Allow LDF to properly discover dependency including those in preprocessor | ||||
|     # conditionals | ||||
|     cg.add_platformio_option("lib_ldf_mode", "chain+") | ||||
|     cg.add_platformio_option("lib_compat_mode", "strict") | ||||
|     cg.add_platformio_option("board", config[CONF_BOARD]) | ||||
|     cg.add_build_flag("-DUSE_RP2040") | ||||
|     cg.set_cpp_standard("gnu++20") | ||||
|   | ||||
| @@ -224,7 +224,7 @@ bool SSD1306::is_sh1106_() const { | ||||
| } | ||||
| bool SSD1306::is_sh1107_() const { return this->model_ == SH1107_MODEL_128_64 || this->model_ == SH1107_MODEL_128_128; } | ||||
| bool SSD1306::is_ssd1305_() const { | ||||
|   return this->model_ == SSD1305_MODEL_128_64 || this->model_ == SSD1305_MODEL_128_64; | ||||
|   return this->model_ == SSD1305_MODEL_128_64 || this->model_ == SSD1305_MODEL_128_32; | ||||
| } | ||||
| void SSD1306::update() { | ||||
|   this->do_update_(); | ||||
|   | ||||
| @@ -3132,7 +3132,7 @@ void HOT GDEY0583T81::display() { | ||||
|   } else { | ||||
|     // Partial out (PTOUT), makes the display exit partial mode | ||||
|     this->command(0x92); | ||||
|     ESP_LOGD(TAG, "Partial update done, next full update after %d cycles", | ||||
|     ESP_LOGD(TAG, "Partial update done, next full update after %" PRIu32 " cycles", | ||||
|              this->full_update_every_ - this->at_update_ - 1); | ||||
|   } | ||||
|  | ||||
|   | ||||
| @@ -78,7 +78,7 @@ enum JsonDetail { DETAIL_ALL, DETAIL_STATE }; | ||||
|   This is because only minimal changes were made to the ESPAsyncWebServer lib_dep; it was undesirable to put deferred | ||||
|   update logic into that library. We need one deferred queue per connection, so instead of one AsyncEventSource with | ||||
|   multiple clients, we have multiple event sources with one client each. This is slightly awkward, which is why it's | ||||
|   implemented in a more straightforward way for ESP-IDF. Arudino platform will eventually go away and this workaround | ||||
|   implemented in a more straightforward way for ESP-IDF. Arduino platform will eventually go away and this workaround | ||||
|   can be forgotten. | ||||
| */ | ||||
| #ifdef USE_ARDUINO | ||||
|   | ||||
| @@ -1055,6 +1055,7 @@ def float_with_unit(quantity, regex_suffix, optional_unit=False): | ||||
|     return validator | ||||
|  | ||||
|  | ||||
| bps = float_with_unit("bits per second", "(bps|bits/s|bit/s)?") | ||||
| frequency = float_with_unit("frequency", "(Hz|HZ|hz)?") | ||||
| resistance = float_with_unit("resistance", "(Ω|Ω|ohm|Ohm|OHM)?") | ||||
| current = float_with_unit("current", "(a|A|amp|Amp|amps|Amps|ampere|Ampere)?") | ||||
|   | ||||
| @@ -4,7 +4,7 @@ from enum import Enum | ||||
|  | ||||
| from esphome.enum import StrEnum | ||||
|  | ||||
| __version__ = "2025.7.0-dev" | ||||
| __version__ = "2025.8.0-dev" | ||||
|  | ||||
| ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_" | ||||
| VALID_SUBSTITUTIONS_CHARACTERS = ( | ||||
|   | ||||
| @@ -187,6 +187,12 @@ def entity_duplicate_validator(platform: str) -> Callable[[ConfigType], ConfigTy | ||||
|             # No name to validate | ||||
|             return config | ||||
|  | ||||
|         # Skip validation for internal entities | ||||
|         # Internal entities are not exposed to Home Assistant and don't use the hash-based | ||||
|         # entity state tracking system, so name collisions don't matter for them | ||||
|         if config.get(CONF_INTERNAL, False): | ||||
|             return config | ||||
|  | ||||
|         # Get the entity name | ||||
|         entity_name = config[CONF_NAME] | ||||
|  | ||||
|   | ||||
| @@ -258,53 +258,60 @@ std::string format_hex(const uint8_t *data, size_t length) { | ||||
| std::string format_hex(const std::vector<uint8_t> &data) { return format_hex(data.data(), data.size()); } | ||||
|  | ||||
| static char format_hex_pretty_char(uint8_t v) { return v >= 10 ? 'A' + (v - 10) : '0' + v; } | ||||
| std::string format_hex_pretty(const uint8_t *data, size_t length) { | ||||
|   if (length == 0) | ||||
| std::string format_hex_pretty(const uint8_t *data, size_t length, char separator, bool show_length) { | ||||
|   if (data == nullptr || length == 0) | ||||
|     return ""; | ||||
|   std::string ret; | ||||
|   ret.resize(3 * length - 1); | ||||
|   uint8_t multiple = separator ? 3 : 2;  // 3 if separator is not \0, 2 otherwise | ||||
|   ret.resize(multiple * length - (separator ? 1 : 0)); | ||||
|   for (size_t i = 0; i < length; i++) { | ||||
|     ret[3 * i] = format_hex_pretty_char((data[i] & 0xF0) >> 4); | ||||
|     ret[3 * i + 1] = format_hex_pretty_char(data[i] & 0x0F); | ||||
|     if (i != length - 1) | ||||
|       ret[3 * i + 2] = '.'; | ||||
|     ret[multiple * i] = format_hex_pretty_char((data[i] & 0xF0) >> 4); | ||||
|     ret[multiple * i + 1] = format_hex_pretty_char(data[i] & 0x0F); | ||||
|     if (separator && i != length - 1) | ||||
|       ret[multiple * i + 2] = separator; | ||||
|   } | ||||
|   if (length > 4) | ||||
|     return ret + " (" + to_string(length) + ")"; | ||||
|   if (show_length && length > 4) | ||||
|     return ret + " (" + std::to_string(length) + ")"; | ||||
|   return ret; | ||||
| } | ||||
| std::string format_hex_pretty(const std::vector<uint8_t> &data) { return format_hex_pretty(data.data(), data.size()); } | ||||
| std::string format_hex_pretty(const std::vector<uint8_t> &data, char separator, bool show_length) { | ||||
|   return format_hex_pretty(data.data(), data.size(), separator, show_length); | ||||
| } | ||||
|  | ||||
| std::string format_hex_pretty(const uint16_t *data, size_t length) { | ||||
|   if (length == 0) | ||||
| std::string format_hex_pretty(const uint16_t *data, size_t length, char separator, bool show_length) { | ||||
|   if (data == nullptr || length == 0) | ||||
|     return ""; | ||||
|   std::string ret; | ||||
|   ret.resize(5 * length - 1); | ||||
|   uint8_t multiple = separator ? 5 : 4;  // 5 if separator is not \0, 4 otherwise | ||||
|   ret.resize(multiple * length - (separator ? 1 : 0)); | ||||
|   for (size_t i = 0; i < length; i++) { | ||||
|     ret[5 * i] = format_hex_pretty_char((data[i] & 0xF000) >> 12); | ||||
|     ret[5 * i + 1] = format_hex_pretty_char((data[i] & 0x0F00) >> 8); | ||||
|     ret[5 * i + 2] = format_hex_pretty_char((data[i] & 0x00F0) >> 4); | ||||
|     ret[5 * i + 3] = format_hex_pretty_char(data[i] & 0x000F); | ||||
|     if (i != length - 1) | ||||
|       ret[5 * i + 2] = '.'; | ||||
|     ret[multiple * i] = format_hex_pretty_char((data[i] & 0xF000) >> 12); | ||||
|     ret[multiple * i + 1] = format_hex_pretty_char((data[i] & 0x0F00) >> 8); | ||||
|     ret[multiple * i + 2] = format_hex_pretty_char((data[i] & 0x00F0) >> 4); | ||||
|     ret[multiple * i + 3] = format_hex_pretty_char(data[i] & 0x000F); | ||||
|     if (separator && i != length - 1) | ||||
|       ret[multiple * i + 4] = separator; | ||||
|   } | ||||
|   if (length > 4) | ||||
|     return ret + " (" + to_string(length) + ")"; | ||||
|   if (show_length && length > 4) | ||||
|     return ret + " (" + std::to_string(length) + ")"; | ||||
|   return ret; | ||||
| } | ||||
| std::string format_hex_pretty(const std::vector<uint16_t> &data) { return format_hex_pretty(data.data(), data.size()); } | ||||
| std::string format_hex_pretty(const std::string &data) { | ||||
| std::string format_hex_pretty(const std::vector<uint16_t> &data, char separator, bool show_length) { | ||||
|   return format_hex_pretty(data.data(), data.size(), separator, show_length); | ||||
| } | ||||
| std::string format_hex_pretty(const std::string &data, char separator, bool show_length) { | ||||
|   if (data.empty()) | ||||
|     return ""; | ||||
|   std::string ret; | ||||
|   ret.resize(3 * data.length() - 1); | ||||
|   uint8_t multiple = separator ? 3 : 2;  // 3 if separator is not \0, 2 otherwise | ||||
|   ret.resize(multiple * data.length() - (separator ? 1 : 0)); | ||||
|   for (size_t i = 0; i < data.length(); i++) { | ||||
|     ret[3 * i] = format_hex_pretty_char((data[i] & 0xF0) >> 4); | ||||
|     ret[3 * i + 1] = format_hex_pretty_char(data[i] & 0x0F); | ||||
|     if (i != data.length() - 1) | ||||
|       ret[3 * i + 2] = '.'; | ||||
|     ret[multiple * i] = format_hex_pretty_char((data[i] & 0xF0) >> 4); | ||||
|     ret[multiple * i + 1] = format_hex_pretty_char(data[i] & 0x0F); | ||||
|     if (separator && i != data.length() - 1) | ||||
|       ret[multiple * i + 2] = separator; | ||||
|   } | ||||
|   if (data.length() > 4) | ||||
|   if (show_length && data.length() > 4) | ||||
|     return ret + " (" + std::to_string(data.length()) + ")"; | ||||
|   return ret; | ||||
| } | ||||
|   | ||||
| @@ -344,20 +344,149 @@ template<std::size_t N> std::string format_hex(const std::array<uint8_t, N> &dat | ||||
|   return format_hex(data.data(), data.size()); | ||||
| } | ||||
|  | ||||
| /// Format the byte array \p data of length \p len in pretty-printed, human-readable hex. | ||||
| std::string format_hex_pretty(const uint8_t *data, size_t length); | ||||
| /// Format the word array \p data of length \p len in pretty-printed, human-readable hex. | ||||
| std::string format_hex_pretty(const uint16_t *data, size_t length); | ||||
| /// Format the vector \p data in pretty-printed, human-readable hex. | ||||
| std::string format_hex_pretty(const std::vector<uint8_t> &data); | ||||
| /// Format the vector \p data in pretty-printed, human-readable hex. | ||||
| std::string format_hex_pretty(const std::vector<uint16_t> &data); | ||||
| /// Format the string \p data in pretty-printed, human-readable hex. | ||||
| std::string format_hex_pretty(const std::string &data); | ||||
| /// Format an unsigned integer in pretty-printed, human-readable hex, starting with the most significant byte. | ||||
| template<typename T, enable_if_t<std::is_unsigned<T>::value, int> = 0> std::string format_hex_pretty(T val) { | ||||
| /** Format a byte array in pretty-printed, human-readable hex format. | ||||
|  * | ||||
|  * Converts binary data to a hexadecimal string representation with customizable formatting. | ||||
|  * Each byte is displayed as a two-digit uppercase hex value, separated by the specified separator. | ||||
|  * Optionally includes the total byte count in parentheses at the end. | ||||
|  * | ||||
|  * @param data Pointer to the byte array to format. | ||||
|  * @param length Number of bytes in the array. | ||||
|  * @param separator Character to use between hex bytes (default: '.'). | ||||
|  * @param show_length Whether to append the byte count in parentheses (default: true). | ||||
|  * @return Formatted hex string, e.g., "A1.B2.C3.D4.E5 (5)" or "A1:B2:C3" depending on parameters. | ||||
|  * | ||||
|  * @note Returns empty string if data is nullptr or length is 0. | ||||
|  * @note The length will only be appended if show_length is true AND the length is greater than 4. | ||||
|  * | ||||
|  * Example: | ||||
|  * @code | ||||
|  * uint8_t data[] = {0xA1, 0xB2, 0xC3}; | ||||
|  * format_hex_pretty(data, 3);           // Returns "A1.B2.C3" (no length shown for <= 4 parts) | ||||
|  * uint8_t data2[] = {0xA1, 0xB2, 0xC3, 0xD4, 0xE5}; | ||||
|  * format_hex_pretty(data2, 5);          // Returns "A1.B2.C3.D4.E5 (5)" | ||||
|  * format_hex_pretty(data2, 5, ':');     // Returns "A1:B2:C3:D4:E5 (5)" | ||||
|  * format_hex_pretty(data2, 5, '.', false); // Returns "A1.B2.C3.D4.E5" | ||||
|  * @endcode | ||||
|  */ | ||||
| std::string format_hex_pretty(const uint8_t *data, size_t length, char separator = '.', bool show_length = true); | ||||
|  | ||||
| /** Format a 16-bit word array in pretty-printed, human-readable hex format. | ||||
|  * | ||||
|  * Similar to the byte array version, but formats 16-bit words as 4-digit hex values. | ||||
|  * | ||||
|  * @param data Pointer to the 16-bit word array to format. | ||||
|  * @param length Number of 16-bit words in the array. | ||||
|  * @param separator Character to use between hex words (default: '.'). | ||||
|  * @param show_length Whether to append the word count in parentheses (default: true). | ||||
|  * @return Formatted hex string with 4-digit hex values per word. | ||||
|  * | ||||
|  * @note The length will only be appended if show_length is true AND the length is greater than 4. | ||||
|  * | ||||
|  * Example: | ||||
|  * @code | ||||
|  * uint16_t data[] = {0xA1B2, 0xC3D4}; | ||||
|  * format_hex_pretty(data, 2); // Returns "A1B2.C3D4" (no length shown for <= 4 parts) | ||||
|  * uint16_t data2[] = {0xA1B2, 0xC3D4, 0xE5F6}; | ||||
|  * format_hex_pretty(data2, 3); // Returns "A1B2.C3D4.E5F6 (3)" | ||||
|  * @endcode | ||||
|  */ | ||||
| std::string format_hex_pretty(const uint16_t *data, size_t length, char separator = '.', bool show_length = true); | ||||
|  | ||||
| /** Format a byte vector in pretty-printed, human-readable hex format. | ||||
|  * | ||||
|  * Convenience overload for std::vector<uint8_t>. Formats each byte as a two-digit | ||||
|  * uppercase hex value with customizable separator. | ||||
|  * | ||||
|  * @param data Vector of bytes to format. | ||||
|  * @param separator Character to use between hex bytes (default: '.'). | ||||
|  * @param show_length Whether to append the byte count in parentheses (default: true). | ||||
|  * @return Formatted hex string representation of the vector contents. | ||||
|  * | ||||
|  * @note The length will only be appended if show_length is true AND the vector size is greater than 4. | ||||
|  * | ||||
|  * Example: | ||||
|  * @code | ||||
|  * std::vector<uint8_t> data = {0xDE, 0xAD, 0xBE, 0xEF}; | ||||
|  * format_hex_pretty(data);        // Returns "DE.AD.BE.EF" (no length shown for <= 4 parts) | ||||
|  * std::vector<uint8_t> data2 = {0xDE, 0xAD, 0xBE, 0xEF, 0xCA}; | ||||
|  * format_hex_pretty(data2);       // Returns "DE.AD.BE.EF.CA (5)" | ||||
|  * format_hex_pretty(data2, '-');  // Returns "DE-AD-BE-EF-CA (5)" | ||||
|  * @endcode | ||||
|  */ | ||||
| std::string format_hex_pretty(const std::vector<uint8_t> &data, char separator = '.', bool show_length = true); | ||||
|  | ||||
| /** Format a 16-bit word vector in pretty-printed, human-readable hex format. | ||||
|  * | ||||
|  * Convenience overload for std::vector<uint16_t>. Each 16-bit word is formatted | ||||
|  * as a 4-digit uppercase hex value in big-endian order. | ||||
|  * | ||||
|  * @param data Vector of 16-bit words to format. | ||||
|  * @param separator Character to use between hex words (default: '.'). | ||||
|  * @param show_length Whether to append the word count in parentheses (default: true). | ||||
|  * @return Formatted hex string representation of the vector contents. | ||||
|  * | ||||
|  * @note The length will only be appended if show_length is true AND the vector size is greater than 4. | ||||
|  * | ||||
|  * Example: | ||||
|  * @code | ||||
|  * std::vector<uint16_t> data = {0x1234, 0x5678}; | ||||
|  * format_hex_pretty(data); // Returns "1234.5678" (no length shown for <= 4 parts) | ||||
|  * std::vector<uint16_t> data2 = {0x1234, 0x5678, 0x9ABC}; | ||||
|  * format_hex_pretty(data2); // Returns "1234.5678.9ABC (3)" | ||||
|  * @endcode | ||||
|  */ | ||||
| std::string format_hex_pretty(const std::vector<uint16_t> &data, char separator = '.', bool show_length = true); | ||||
|  | ||||
| /** Format a string's bytes in pretty-printed, human-readable hex format. | ||||
|  * | ||||
|  * Treats each character in the string as a byte and formats it in hex. | ||||
|  * Useful for debugging binary data stored in std::string containers. | ||||
|  * | ||||
|  * @param data String whose bytes should be formatted as hex. | ||||
|  * @param separator Character to use between hex bytes (default: '.'). | ||||
|  * @param show_length Whether to append the byte count in parentheses (default: true). | ||||
|  * @return Formatted hex string representation of the string's byte contents. | ||||
|  * | ||||
|  * @note The length will only be appended if show_length is true AND the string length is greater than 4. | ||||
|  * | ||||
|  * Example: | ||||
|  * @code | ||||
|  * std::string data = "ABC";  // ASCII: 0x41, 0x42, 0x43 | ||||
|  * format_hex_pretty(data);   // Returns "41.42.43" (no length shown for <= 4 parts) | ||||
|  * std::string data2 = "ABCDE"; | ||||
|  * format_hex_pretty(data2);  // Returns "41.42.43.44.45 (5)" | ||||
|  * @endcode | ||||
|  */ | ||||
| std::string format_hex_pretty(const std::string &data, char separator = '.', bool show_length = true); | ||||
|  | ||||
| /** Format an unsigned integer in pretty-printed, human-readable hex format. | ||||
|  * | ||||
|  * Converts the integer to big-endian byte order and formats each byte as hex. | ||||
|  * The most significant byte appears first in the output string. | ||||
|  * | ||||
|  * @tparam T Unsigned integer type (uint8_t, uint16_t, uint32_t, uint64_t, etc.). | ||||
|  * @param val The unsigned integer value to format. | ||||
|  * @param separator Character to use between hex bytes (default: '.'). | ||||
|  * @param show_length Whether to append the byte count in parentheses (default: true). | ||||
|  * @return Formatted hex string with most significant byte first. | ||||
|  * | ||||
|  * @note The length will only be appended if show_length is true AND sizeof(T) is greater than 4. | ||||
|  * | ||||
|  * Example: | ||||
|  * @code | ||||
|  * uint32_t value = 0x12345678; | ||||
|  * format_hex_pretty(value);        // Returns "12.34.56.78" (no length shown for <= 4 parts) | ||||
|  * uint64_t value2 = 0x123456789ABCDEF0; | ||||
|  * format_hex_pretty(value2);       // Returns "12.34.56.78.9A.BC.DE.F0 (8)" | ||||
|  * format_hex_pretty(value2, ':');  // Returns "12:34:56:78:9A:BC:DE:F0 (8)" | ||||
|  * format_hex_pretty<uint16_t>(0x1234); // Returns "12.34" | ||||
|  * @endcode | ||||
|  */ | ||||
| template<typename T, enable_if_t<std::is_unsigned<T>::value, int> = 0> | ||||
| std::string format_hex_pretty(T val, char separator = '.', bool show_length = true) { | ||||
|   val = convert_big_endian(val); | ||||
|   return format_hex_pretty(reinterpret_cast<uint8_t *>(&val), sizeof(T)); | ||||
|   return format_hex_pretty(reinterpret_cast<uint8_t *>(&val), sizeof(T), separator, show_length); | ||||
| } | ||||
|  | ||||
| /// Format the byte array \p data of length \p len in binary. | ||||
|   | ||||
| @@ -66,10 +66,8 @@ void HOT Scheduler::set_timer_common_(Component *component, SchedulerItem::Type | ||||
|  | ||||
|   if (delay == SCHEDULER_DONT_RUN) { | ||||
|     // Still need to cancel existing timer if name is not empty | ||||
|     if (this->is_name_valid_(name_cstr)) { | ||||
|       LockGuard guard{this->lock_}; | ||||
|       this->cancel_item_locked_(component, name_cstr, type); | ||||
|     } | ||||
|     LockGuard guard{this->lock_}; | ||||
|     this->cancel_item_locked_(component, name_cstr, type); | ||||
|     return; | ||||
|   } | ||||
|  | ||||
| @@ -125,10 +123,8 @@ void HOT Scheduler::set_timer_common_(Component *component, SchedulerItem::Type | ||||
|  | ||||
|   LockGuard guard{this->lock_}; | ||||
|   // If name is provided, do atomic cancel-and-add | ||||
|   if (this->is_name_valid_(name_cstr)) { | ||||
|     // Cancel existing items | ||||
|     this->cancel_item_locked_(component, name_cstr, type); | ||||
|   } | ||||
|   // Cancel existing items | ||||
|   this->cancel_item_locked_(component, name_cstr, type); | ||||
|   // Add new item directly to to_add_ | ||||
|   // since we have the lock held | ||||
|   this->to_add_.push_back(std::move(item)); | ||||
| @@ -442,10 +438,6 @@ bool HOT Scheduler::cancel_item_(Component *component, bool is_static_string, co | ||||
|   // Get the name as const char* | ||||
|   const char *name_cstr = this->get_name_cstr_(is_static_string, name_ptr); | ||||
|  | ||||
|   // Handle null or empty names | ||||
|   if (!this->is_name_valid_(name_cstr)) | ||||
|     return false; | ||||
|  | ||||
|   // obtain lock because this function iterates and can be called from non-loop task context | ||||
|   LockGuard guard{this->lock_}; | ||||
|   return this->cancel_item_locked_(component, name_cstr, type); | ||||
| @@ -453,6 +445,11 @@ bool HOT Scheduler::cancel_item_(Component *component, bool is_static_string, co | ||||
|  | ||||
| // Helper to cancel items by name - must be called with lock held | ||||
| bool HOT Scheduler::cancel_item_locked_(Component *component, const char *name_cstr, SchedulerItem::Type type) { | ||||
|   // Early return if name is invalid - no items to cancel | ||||
|   if (name_cstr == nullptr || name_cstr[0] == '\0') { | ||||
|     return false; | ||||
|   } | ||||
|  | ||||
|   size_t total_cancelled = 0; | ||||
|  | ||||
|   // Check all containers for matching items | ||||
|   | ||||
| @@ -150,9 +150,6 @@ class Scheduler { | ||||
|     return is_static_string ? static_cast<const char *>(name_ptr) : static_cast<const std::string *>(name_ptr)->c_str(); | ||||
|   } | ||||
|  | ||||
|   // Helper to check if a name is valid (not null and not empty) | ||||
|   inline bool is_name_valid_(const char *name) { return name != nullptr && name[0] != '\0'; } | ||||
|  | ||||
|   // Common implementation for cancel operations | ||||
|   bool cancel_item_(Component *component, bool is_static_string, const void *name_ptr, SchedulerItem::Type type); | ||||
|  | ||||
|   | ||||
| @@ -61,6 +61,7 @@ src_filter = | ||||
|     +<../tests/dummy_main.cpp> | ||||
|     +<../.temp/all-include.cpp> | ||||
| lib_ldf_mode = off | ||||
| lib_compat_mode = strict | ||||
|  | ||||
| ; These are common settings for all Arduino-framework based environments. | ||||
| [common:arduino] | ||||
|   | ||||
| @@ -13,7 +13,7 @@ platformio==6.1.18  # When updating platformio, also update /docker/Dockerfile | ||||
| esptool==4.9.0 | ||||
| click==8.1.7 | ||||
| esphome-dashboard==20250514.0 | ||||
| aioesphomeapi==34.1.0 | ||||
| aioesphomeapi==34.2.0 | ||||
| zeroconf==0.147.0 | ||||
| puremagic==1.30 | ||||
| ruamel.yaml==0.18.14 # dashboard_import | ||||
|   | ||||
| @@ -987,13 +987,24 @@ def build_message_type( | ||||
|  | ||||
|     # Add MESSAGE_TYPE method if this is a service message | ||||
|     if message_id is not None: | ||||
|         # Validate that message_id fits in uint8_t | ||||
|         if message_id > 255: | ||||
|             raise ValueError( | ||||
|                 f"Message ID {message_id} for {desc.name} exceeds uint8_t maximum (255)" | ||||
|             ) | ||||
|  | ||||
|         # Add static constexpr for message type | ||||
|         public_content.append(f"static constexpr uint16_t MESSAGE_TYPE = {message_id};") | ||||
|         public_content.append(f"static constexpr uint8_t MESSAGE_TYPE = {message_id};") | ||||
|  | ||||
|         # Add estimated size constant | ||||
|         estimated_size = calculate_message_estimated_size(desc) | ||||
|         # Validate that estimated_size fits in uint8_t | ||||
|         if estimated_size > 255: | ||||
|             raise ValueError( | ||||
|                 f"Estimated size {estimated_size} for {desc.name} exceeds uint8_t maximum (255)" | ||||
|             ) | ||||
|         public_content.append( | ||||
|             f"static constexpr uint16_t ESTIMATED_SIZE = {estimated_size};" | ||||
|             f"static constexpr uint8_t ESTIMATED_SIZE = {estimated_size};" | ||||
|         ) | ||||
|  | ||||
|         # Add message_name method inline in header | ||||
|   | ||||
| @@ -270,7 +270,7 @@ def lint_newline(fname): | ||||
|     return "File contains Windows newline. Please set your editor to Unix newline mode." | ||||
|  | ||||
|  | ||||
| @lint_content_check(exclude=["*.svg"]) | ||||
| @lint_content_check(exclude=["*.svg", ".clang-tidy.hash"]) | ||||
| def lint_end_newline(fname, content): | ||||
|     if content and not content.endswith("\n"): | ||||
|         return "File does not end with a newline, please add an empty line at the end of the file." | ||||
|   | ||||
| @@ -22,6 +22,7 @@ from helpers import ( | ||||
|     git_ls_files, | ||||
|     load_idedata, | ||||
|     print_error_for_file, | ||||
|     print_file_list, | ||||
|     root_path, | ||||
|     temp_header_file, | ||||
| ) | ||||
| @@ -218,13 +219,14 @@ def main(): | ||||
|     ) | ||||
|     args = parser.parse_args() | ||||
|  | ||||
|     idedata = load_idedata(args.environment) | ||||
|     options = clang_options(idedata) | ||||
|  | ||||
|     files = [] | ||||
|     for path in git_ls_files(["*.cpp"]): | ||||
|         files.append(os.path.relpath(path, os.getcwd())) | ||||
|  | ||||
|     # Print initial file count if it's large | ||||
|     if len(files) > 50: | ||||
|         print(f"Found {len(files)} total files to process") | ||||
|  | ||||
|     if args.files: | ||||
|         # Match against files specified on command-line | ||||
|         file_name_re = re.compile("|".join(args.files)) | ||||
| @@ -240,10 +242,28 @@ def main(): | ||||
|  | ||||
|     if args.split_num: | ||||
|         files = split_list(files, args.split_num)[args.split_at - 1] | ||||
|         print(f"Split {args.split_at}/{args.split_num}: checking {len(files)} files") | ||||
|  | ||||
|     # Print file count before adding header file | ||||
|     print(f"\nTotal files to check: {len(files)}") | ||||
|  | ||||
|     # Early exit if no files to check | ||||
|     if not files: | ||||
|         print("No files to check - exiting early") | ||||
|         return 0 | ||||
|  | ||||
|     # Only build header file if we have actual files to check | ||||
|     if args.all_headers and args.split_at in (None, 1): | ||||
|         build_all_include() | ||||
|         files.insert(0, temp_header_file) | ||||
|         print(f"Added all-include header file, new total: {len(files)}") | ||||
|  | ||||
|     # Print final file list before loading idedata | ||||
|     print_file_list(files, "Final files to process:") | ||||
|  | ||||
|     # Load idedata and options only if we have files to check | ||||
|     idedata = load_idedata(args.environment) | ||||
|     options = clang_options(idedata) | ||||
|  | ||||
|     tmpdir = None | ||||
|     if args.fix: | ||||
|   | ||||
							
								
								
									
188 script/clang_tidy_hash.py (Executable file)
							| @@ -0,0 +1,188 @@ | ||||
| #!/usr/bin/env python3 | ||||
| """Calculate and manage hash for clang-tidy configuration.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import argparse | ||||
| import hashlib | ||||
| from pathlib import Path | ||||
| import re | ||||
| import sys | ||||
|  | ||||
| # Add the script directory to path to import helpers | ||||
| script_dir = Path(__file__).parent | ||||
| sys.path.insert(0, str(script_dir)) | ||||
|  | ||||
|  | ||||
| def read_file_lines(path: Path) -> list[str]: | ||||
|     """Read lines from a file.""" | ||||
|     with open(path) as f: | ||||
|         return f.readlines() | ||||
|  | ||||
|  | ||||
| def parse_requirement_line(line: str) -> tuple[str, str] | None: | ||||
|     """Parse a requirement line and return (package, original_line) or None. | ||||
|  | ||||
|     Handles formats like: | ||||
|     - package==1.2.3 | ||||
|     - package==1.2.3  # comment | ||||
|     - package>=1.2.3,<2.0.0 | ||||
|     """ | ||||
|     original_line = line.strip() | ||||
|  | ||||
|     # Extract the part before any comment for parsing | ||||
|     parse_line = line | ||||
|     if "#" in parse_line: | ||||
|         parse_line = parse_line[: parse_line.index("#")] | ||||
|  | ||||
|     parse_line = parse_line.strip() | ||||
|     if not parse_line: | ||||
|         return None | ||||
|  | ||||
|     # Use regex to extract package name | ||||
|     # This matches package names followed by version operators | ||||
|     match = re.match(r"^([a-zA-Z0-9_-]+)(==|>=|<=|>|<|!=|~=)(.+)$", parse_line) | ||||
|     if match: | ||||
|         return (match.group(1), original_line)  # Return package name and original line | ||||
|  | ||||
|     return None | ||||
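|  | ||||
| # Editorial note: illustrative behaviour of the parser above; the version numbers | ||||
| # are hypothetical and not taken from requirements_dev.txt: | ||||
| #   parse_requirement_line("clang-tidy==18.1.1")         -> ("clang-tidy", "clang-tidy==18.1.1") | ||||
| #   parse_requirement_line("clang-tidy==18.1.1  # pin")  -> ("clang-tidy", "clang-tidy==18.1.1  # pin") | ||||
| #   parse_requirement_line("# just a comment")           -> None | ||||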
|  | ||||
|  | ||||
| def get_clang_tidy_version_from_requirements() -> str: | ||||
|     """Get clang-tidy version from requirements_dev.txt""" | ||||
|     requirements_path = Path(__file__).parent.parent / "requirements_dev.txt" | ||||
|     lines = read_file_lines(requirements_path) | ||||
|  | ||||
|     for line in lines: | ||||
|         parsed = parse_requirement_line(line) | ||||
|         if parsed and parsed[0] == "clang-tidy": | ||||
|             # Return the original line (preserves comments) | ||||
|             return parsed[1] | ||||
|  | ||||
|     return "clang-tidy version not found" | ||||
|  | ||||
|  | ||||
| def extract_platformio_flags() -> str: | ||||
|     """Extract clang-tidy related flags from platformio.ini""" | ||||
|     flags: list[str] = [] | ||||
|     in_clangtidy_section = False | ||||
|  | ||||
|     platformio_path = Path(__file__).parent.parent / "platformio.ini" | ||||
|     lines = read_file_lines(platformio_path) | ||||
|     for line in lines: | ||||
|         line = line.strip() | ||||
|         if line.startswith("[flags:clangtidy]"): | ||||
|             in_clangtidy_section = True | ||||
|             continue | ||||
|         elif line.startswith("[") and in_clangtidy_section: | ||||
|             break | ||||
|         elif in_clangtidy_section and line and not line.startswith("#"): | ||||
|             flags.append(line) | ||||
|  | ||||
|     return "\n".join(sorted(flags)) | ||||
|  | ||||
|  | ||||
| def read_file_bytes(path: Path) -> bytes: | ||||
|     """Read bytes from a file.""" | ||||
|     with open(path, "rb") as f: | ||||
|         return f.read() | ||||
|  | ||||
|  | ||||
| def calculate_clang_tidy_hash() -> str: | ||||
|     """Calculate hash of clang-tidy configuration and version""" | ||||
|     hasher = hashlib.sha256() | ||||
|  | ||||
|     # Hash .clang-tidy file | ||||
|     clang_tidy_path = Path(__file__).parent.parent / ".clang-tidy" | ||||
|     content = read_file_bytes(clang_tidy_path) | ||||
|     hasher.update(content) | ||||
|  | ||||
|     # Hash clang-tidy version from requirements_dev.txt | ||||
|     version = get_clang_tidy_version_from_requirements() | ||||
|     hasher.update(version.encode()) | ||||
|  | ||||
|     # Hash relevant platformio.ini sections | ||||
|     pio_flags = extract_platformio_flags() | ||||
|     hasher.update(pio_flags.encode()) | ||||
|  | ||||
|     return hasher.hexdigest() | ||||
|  | ||||
|  | ||||
| def read_stored_hash() -> str | None: | ||||
|     """Read the stored hash from file""" | ||||
|     hash_file = Path(__file__).parent.parent / ".clang-tidy.hash" | ||||
|     if hash_file.exists(): | ||||
|         lines = read_file_lines(hash_file) | ||||
|         return lines[0].strip() if lines else None | ||||
|     return None | ||||
|  | ||||
|  | ||||
| def write_file_content(path: Path, content: str) -> None: | ||||
|     """Write content to a file.""" | ||||
|     with open(path, "w") as f: | ||||
|         f.write(content) | ||||
|  | ||||
|  | ||||
| def write_hash(hash_value: str) -> None: | ||||
|     """Write hash to file""" | ||||
|     hash_file = Path(__file__).parent.parent / ".clang-tidy.hash" | ||||
|     write_file_content(hash_file, hash_value) | ||||
|  | ||||
|  | ||||
| def main() -> None: | ||||
|     parser = argparse.ArgumentParser(description="Manage clang-tidy configuration hash") | ||||
|     parser.add_argument( | ||||
|         "--check", | ||||
|         action="store_true", | ||||
|         help="Check if full scan needed (exit 0 if needed)", | ||||
|     ) | ||||
|     parser.add_argument("--update", action="store_true", help="Update the hash file") | ||||
|     parser.add_argument( | ||||
|         "--update-if-changed", | ||||
|         action="store_true", | ||||
|         help="Update hash only if configuration changed (for pre-commit)", | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "--verify", action="store_true", help="Verify hash matches (for CI)" | ||||
|     ) | ||||
|  | ||||
|     args = parser.parse_args() | ||||
|  | ||||
|     current_hash = calculate_clang_tidy_hash() | ||||
|     stored_hash = read_stored_hash() | ||||
|  | ||||
|     if args.check: | ||||
|         # Exit 0 if full scan needed (hash changed or no hash file) | ||||
|         sys.exit(0 if current_hash != stored_hash else 1) | ||||
|  | ||||
|     elif args.update: | ||||
|         write_hash(current_hash) | ||||
|         print(f"Hash updated: {current_hash}") | ||||
|  | ||||
|     elif args.update_if_changed: | ||||
|         if current_hash != stored_hash: | ||||
|             write_hash(current_hash) | ||||
|             print(f"Clang-tidy hash updated: {current_hash}") | ||||
|             # Exit 0 so pre-commit can stage the file | ||||
|             sys.exit(0) | ||||
|         else: | ||||
|             print("Clang-tidy hash unchanged") | ||||
|             sys.exit(0) | ||||
|  | ||||
|     elif args.verify: | ||||
|         if current_hash != stored_hash: | ||||
|             print("ERROR: Clang-tidy configuration has changed but hash not updated!") | ||||
|             print(f"Expected: {current_hash}") | ||||
|             print(f"Found: {stored_hash}") | ||||
|             print("\nPlease run: script/clang_tidy_hash.py --update") | ||||
|             sys.exit(1) | ||||
|         print("Hash verification passed") | ||||
|  | ||||
|     else: | ||||
|         print(f"Current hash: {current_hash}") | ||||
|         print(f"Stored hash: {stored_hash}") | ||||
|         print(f"Match: {current_hash == stored_hash}") | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
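|  | ||||
| # Editorial note: typical invocations, based on the flags defined above: | ||||
| #   python script/clang_tidy_hash.py --verify   # CI: fail if the stored hash is stale | ||||
| #   python script/clang_tidy_hash.py --update   # developer: refresh .clang-tidy.hash | ||||
| #   python script/clang_tidy_hash.py --check    # exit 0 when a full clang-tidy scan is needed | ||||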
							
								
								
									
245 script/determine-jobs.py (Executable file)
							| @@ -0,0 +1,245 @@ | ||||
| #!/usr/bin/env python3 | ||||
| """Determine which CI jobs should run based on changed files. | ||||
|  | ||||
| This script is a centralized way to determine which CI jobs need to run based on | ||||
| what files have changed. It outputs JSON with the following structure: | ||||
|  | ||||
| { | ||||
|   "integration_tests": true/false, | ||||
|   "clang_tidy": true/false, | ||||
|   "clang_format": true/false, | ||||
|   "python_linters": true/false, | ||||
|   "changed_components": ["component1", "component2", ...], | ||||
|   "component_test_count": 5 | ||||
| } | ||||
|  | ||||
| The CI workflow uses this information to: | ||||
| - Skip or run integration tests | ||||
| - Skip or run clang-tidy (and whether to do a full scan) | ||||
| - Skip or run clang-format | ||||
| - Skip or run Python linters (ruff, flake8, pylint, pyupgrade) | ||||
| - Determine which components to test individually | ||||
| - Decide how to split component tests (if there are many) | ||||
|  | ||||
| Usage: | ||||
|   python script/determine-jobs.py [-b BRANCH] | ||||
|  | ||||
| Options: | ||||
|   -b, --branch BRANCH  Branch to compare against (default: dev) | ||||
| """ | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| import argparse | ||||
| import json | ||||
| import os | ||||
| from pathlib import Path | ||||
| import subprocess | ||||
| import sys | ||||
| from typing import Any | ||||
|  | ||||
| from helpers import ( | ||||
|     CPP_FILE_EXTENSIONS, | ||||
|     ESPHOME_COMPONENTS_PATH, | ||||
|     PYTHON_FILE_EXTENSIONS, | ||||
|     changed_files, | ||||
|     get_all_dependencies, | ||||
|     get_components_from_integration_fixtures, | ||||
|     parse_list_components_output, | ||||
|     root_path, | ||||
| ) | ||||
|  | ||||
|  | ||||
| def should_run_integration_tests(branch: str | None = None) -> bool: | ||||
|     """Determine if integration tests should run based on changed files. | ||||
|  | ||||
|     This function is used by the CI workflow to intelligently skip integration tests when they're | ||||
|     not needed, saving significant CI time and resources. | ||||
|  | ||||
|     Integration tests will run when ANY of the following conditions are met: | ||||
|  | ||||
|     1. Core C++ files changed (esphome/core/*) | ||||
|        - Any .cpp, .h, .tcc files in the core directory | ||||
|        - These files contain fundamental functionality used throughout ESPHome | ||||
|        - Examples: esphome/core/component.cpp, esphome/core/application.h | ||||
|  | ||||
|     2. Core Python files changed (esphome/core/*.py) | ||||
|        - Only .py files in the esphome/core/ directory | ||||
|        - These are core Python files that affect the entire system | ||||
|        - Examples: esphome/core/config.py, esphome/core/__init__.py | ||||
|        - NOT included: esphome/*.py, esphome/dashboard/*.py, esphome/components/*/*.py | ||||
|  | ||||
|     3. Integration test files changed | ||||
|        - Any file in tests/integration/ directory | ||||
|        - This includes test files themselves and fixture YAML files | ||||
|        - Examples: tests/integration/test_api.py, tests/integration/fixtures/api.yaml | ||||
|  | ||||
|     4. Components used by integration tests (or their dependencies) changed | ||||
|        - The function parses all YAML files in tests/integration/fixtures/ | ||||
|        - Extracts which components are used in integration tests | ||||
|        - Recursively finds all dependencies of those components | ||||
|        - If any of these components have changes, tests must run | ||||
|        - Example: If api.yaml uses 'sensor' and 'api' components, and 'api' depends on 'socket', | ||||
|          then changes to sensor/, api/, or socket/ components trigger tests | ||||
|  | ||||
|     Args: | ||||
|         branch: Branch to compare against. If None, uses default. | ||||
|  | ||||
|     Returns: | ||||
|         True if integration tests should run, False otherwise. | ||||
|     """ | ||||
|     files = changed_files(branch) | ||||
|  | ||||
|     # Check if any core files changed (esphome/core/*) | ||||
|     for file in files: | ||||
|         if file.startswith("esphome/core/"): | ||||
|             return True | ||||
|  | ||||
|     # Check if any integration test files changed | ||||
|     if any("tests/integration" in file for file in files): | ||||
|         return True | ||||
|  | ||||
|     # Get all components used in integration tests and their dependencies | ||||
|     fixture_components = get_components_from_integration_fixtures() | ||||
|     all_required_components = get_all_dependencies(fixture_components) | ||||
|  | ||||
|     # Check if any required components changed | ||||
|     for file in files: | ||||
|         if file.startswith(ESPHOME_COMPONENTS_PATH): | ||||
|             parts = file.split("/") | ||||
|             if len(parts) >= 3: | ||||
|                 component = parts[2] | ||||
|                 if component in all_required_components: | ||||
|                     return True | ||||
|  | ||||
|     return False | ||||
|  | ||||
|  | ||||
| def should_run_clang_tidy(branch: str | None = None) -> bool: | ||||
|     """Determine if clang-tidy should run based on changed files. | ||||
|  | ||||
|     This function is used by the CI workflow to intelligently skip clang-tidy checks when they're | ||||
|     not needed, saving significant CI time and resources. | ||||
|  | ||||
|     Clang-tidy will run when ANY of the following conditions are met: | ||||
|  | ||||
|     1. Clang-tidy configuration changed | ||||
|        - The hash of .clang-tidy configuration file has changed | ||||
|        - The hash includes the .clang-tidy file, clang-tidy version from requirements_dev.txt, | ||||
|          and relevant platformio.ini sections | ||||
|        - When configuration changes, a full scan is needed to ensure all code complies | ||||
|          with the new rules | ||||
|        - Detected by script/clang_tidy_hash.py --check returning exit code 0 | ||||
|  | ||||
|     2. Any C++ source files changed | ||||
|        - Any file with C++ extensions: .cpp, .h, .hpp, .cc, .cxx, .c, .tcc | ||||
|        - Includes files anywhere in the repository, not just in esphome/ | ||||
|        - This ensures all C++ code is checked, including tests, examples, etc. | ||||
|        - Examples: esphome/core/component.cpp, tests/custom/my_component.h | ||||
|  | ||||
|     If the hash check fails for any reason, clang-tidy runs as a safety measure to ensure | ||||
|     code quality is maintained. | ||||
|  | ||||
|     Args: | ||||
|         branch: Branch to compare against. If None, uses default. | ||||
|  | ||||
|     Returns: | ||||
|         True if clang-tidy should run, False otherwise. | ||||
|     """ | ||||
|     # First check if clang-tidy configuration changed (full scan needed) | ||||
|     try: | ||||
|         result = subprocess.run( | ||||
|             [os.path.join(root_path, "script", "clang_tidy_hash.py"), "--check"], | ||||
|             capture_output=True, | ||||
|             check=False, | ||||
|         ) | ||||
|         # Exit 0 means hash changed (full scan needed) | ||||
|         if result.returncode == 0: | ||||
|             return True | ||||
|     except Exception: | ||||
|         # If hash check fails, run clang-tidy to be safe | ||||
|         return True | ||||
|  | ||||
|     return _any_changed_file_endswith(branch, CPP_FILE_EXTENSIONS) | ||||
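|  | ||||
| # Editorial note: the hash probe above is roughly equivalent to running | ||||
| #   python script/clang_tidy_hash.py --check | ||||
| # from the repository root and treating exit code 0 as "configuration changed, | ||||
| # do a full scan". | ||||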
|  | ||||
|  | ||||
| def should_run_clang_format(branch: str | None = None) -> bool: | ||||
|     """Determine if clang-format should run based on changed files. | ||||
|  | ||||
|     This function is used by the CI workflow to skip clang-format checks when no C++ files | ||||
|     have changed, saving CI time and resources. | ||||
|  | ||||
|     Clang-format will run when any C++ source files have changed. | ||||
|  | ||||
|     Args: | ||||
|         branch: Branch to compare against. If None, uses default. | ||||
|  | ||||
|     Returns: | ||||
|         True if clang-format should run, False otherwise. | ||||
|     """ | ||||
|     return _any_changed_file_endswith(branch, CPP_FILE_EXTENSIONS) | ||||
|  | ||||
|  | ||||
| def should_run_python_linters(branch: str | None = None) -> bool: | ||||
|     """Determine if Python linters (ruff, flake8, pylint, pyupgrade) should run based on changed files. | ||||
|  | ||||
|     This function is used by the CI workflow to skip Python linting checks when no Python files | ||||
|     have changed, saving CI time and resources. | ||||
|  | ||||
|     Python linters will run when any Python source files have changed. | ||||
|  | ||||
|     Args: | ||||
|         branch: Branch to compare against. If None, uses default. | ||||
|  | ||||
|     Returns: | ||||
|         True if Python linters should run, False otherwise. | ||||
|     """ | ||||
|     return _any_changed_file_endswith(branch, PYTHON_FILE_EXTENSIONS) | ||||
|  | ||||
|  | ||||
| def _any_changed_file_endswith(branch: str | None, extensions: tuple[str, ...]) -> bool: | ||||
|     """Check if a changed file ends with any of the specified extensions.""" | ||||
|     return any(file.endswith(extensions) for file in changed_files(branch)) | ||||
|  | ||||
|  | ||||
| def main() -> None: | ||||
|     """Main function that determines which CI jobs to run.""" | ||||
|     parser = argparse.ArgumentParser( | ||||
|         description="Determine which CI jobs should run based on changed files" | ||||
|     ) | ||||
|     parser.add_argument( | ||||
|         "-b", "--branch", help="Branch to compare changed files against" | ||||
|     ) | ||||
|     args = parser.parse_args() | ||||
|  | ||||
|     # Determine what should run | ||||
|     run_integration = should_run_integration_tests(args.branch) | ||||
|     run_clang_tidy = should_run_clang_tidy(args.branch) | ||||
|     run_clang_format = should_run_clang_format(args.branch) | ||||
|     run_python_linters = should_run_python_linters(args.branch) | ||||
|  | ||||
|     # Get changed components using list-components.py for exact compatibility | ||||
|     script_path = Path(__file__).parent / "list-components.py" | ||||
|     cmd = [sys.executable, str(script_path), "--changed"] | ||||
|     if args.branch: | ||||
|         cmd.extend(["-b", args.branch]) | ||||
|  | ||||
|     result = subprocess.run(cmd, capture_output=True, text=True, check=True) | ||||
|     changed_components = parse_list_components_output(result.stdout) | ||||
|  | ||||
|     # Build output | ||||
|     output: dict[str, Any] = { | ||||
|         "integration_tests": run_integration, | ||||
|         "clang_tidy": run_clang_tidy, | ||||
|         "clang_format": run_clang_format, | ||||
|         "python_linters": run_python_linters, | ||||
|         "changed_components": changed_components, | ||||
|         "component_test_count": len(changed_components), | ||||
|     } | ||||
|  | ||||
|     # Output as JSON | ||||
|     print(json.dumps(output)) | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
| @@ -1,8 +1,14 @@ | ||||
| from __future__ import annotations | ||||
|  | ||||
| from functools import cache | ||||
| import json | ||||
| import os | ||||
| import os.path | ||||
| from pathlib import Path | ||||
| import re | ||||
| import subprocess | ||||
| import time | ||||
| from typing import Any | ||||
|  | ||||
| import colorama | ||||
|  | ||||
| @@ -11,14 +17,42 @@ basepath = os.path.join(root_path, "esphome") | ||||
| temp_folder = os.path.join(root_path, ".temp") | ||||
| temp_header_file = os.path.join(temp_folder, "all-include.cpp") | ||||
|  | ||||
| # C++ file extensions used for clang-tidy and clang-format checks | ||||
| CPP_FILE_EXTENSIONS = (".cpp", ".h", ".hpp", ".cc", ".cxx", ".c", ".tcc") | ||||
|  | ||||
| def styled(color, msg, reset=True): | ||||
| # Python file extensions | ||||
| PYTHON_FILE_EXTENSIONS = (".py", ".pyi") | ||||
|  | ||||
| # YAML file extensions | ||||
| YAML_FILE_EXTENSIONS = (".yaml", ".yml") | ||||
|  | ||||
| # Component path prefix | ||||
| ESPHOME_COMPONENTS_PATH = "esphome/components/" | ||||
|  | ||||
|  | ||||
| def parse_list_components_output(output: str) -> list[str]: | ||||
|     """Parse the output from list-components.py script. | ||||
|  | ||||
|     The script outputs one component name per line. | ||||
|  | ||||
|     Args: | ||||
|         output: The stdout from list-components.py | ||||
|  | ||||
|     Returns: | ||||
|         List of component names, or empty list if no output | ||||
|     """ | ||||
|     if not output or not output.strip(): | ||||
|         return [] | ||||
|     return [c.strip() for c in output.strip().split("\n") if c.strip()] | ||||
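|  | ||||
| # Editorial note: illustrative behaviour of the helper above: | ||||
| #   parse_list_components_output("wifi\nmqtt\n")  -> ["wifi", "mqtt"] | ||||
| #   parse_list_components_output("")              -> [] | ||||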
|  | ||||
|  | ||||
| def styled(color: str | tuple[str, ...], msg: str, reset: bool = True) -> str: | ||||
|     prefix = "".join(color) if isinstance(color, tuple) else color | ||||
|     suffix = colorama.Style.RESET_ALL if reset else "" | ||||
|     return prefix + msg + suffix | ||||
|  | ||||
|  | ||||
| def print_error_for_file(file, body): | ||||
| def print_error_for_file(file: str, body: str | None) -> None: | ||||
|     print( | ||||
|         styled(colorama.Fore.GREEN, "### File ") | ||||
|         + styled((colorama.Fore.GREEN, colorama.Style.BRIGHT), file) | ||||
| @@ -29,17 +63,22 @@ def print_error_for_file(file, body): | ||||
|         print() | ||||
|  | ||||
|  | ||||
| def build_all_include(): | ||||
| def build_all_include() -> None: | ||||
|     # Build a cpp file that includes all header files in this repo. | ||||
|     # Otherwise header-only integrations would not be tested by clang-tidy | ||||
|     headers = [] | ||||
|     for path in walk_files(basepath): | ||||
|         filetypes = (".h",) | ||||
|         ext = os.path.splitext(path)[1] | ||||
|         if ext in filetypes: | ||||
|             path = os.path.relpath(path, root_path) | ||||
|             include_p = path.replace(os.path.sep, "/") | ||||
|             headers.append(f'#include "{include_p}"') | ||||
|  | ||||
|     # Use git ls-files to find all .h files in the esphome directory | ||||
|     # This is much faster than walking the filesystem | ||||
|     cmd = ["git", "ls-files", "esphome/**/*.h"] | ||||
|     proc = subprocess.run(cmd, capture_output=True, text=True, check=True) | ||||
|  | ||||
|     # Process git output - git already returns paths relative to repo root | ||||
|     headers = [ | ||||
|         f'#include "{include_p}"' | ||||
|         for line in proc.stdout.strip().split("\n") | ||||
|         if (include_p := line.replace(os.path.sep, "/")) | ||||
|     ] | ||||
|  | ||||
|     headers.sort() | ||||
|     headers.append("") | ||||
|     content = "\n".join(headers) | ||||
| @@ -48,29 +87,87 @@ def build_all_include(): | ||||
|     p.write_text(content, encoding="utf-8") | ||||
|  | ||||
|  | ||||
| def walk_files(path): | ||||
|     for root, _, files in os.walk(path): | ||||
|         for name in files: | ||||
|             yield os.path.join(root, name) | ||||
|  | ||||
|  | ||||
| def get_output(*args): | ||||
| def get_output(*args: str) -> str: | ||||
|     with subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc: | ||||
|         output, _ = proc.communicate() | ||||
|     return output.decode("utf-8") | ||||
|  | ||||
|  | ||||
| def get_err(*args): | ||||
| def get_err(*args: str) -> str: | ||||
|     with subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc: | ||||
|         _, err = proc.communicate() | ||||
|     return err.decode("utf-8") | ||||
|  | ||||
|  | ||||
| def splitlines_no_ends(string): | ||||
| def splitlines_no_ends(string: str) -> list[str]: | ||||
|     return [s.strip() for s in string.splitlines()] | ||||
|  | ||||
|  | ||||
| def changed_files(branch="dev"): | ||||
| def _get_pr_number_from_github_env() -> str | None: | ||||
|     """Extract PR number from GitHub environment variables. | ||||
|  | ||||
|     Returns: | ||||
|         PR number as string, or None if not found | ||||
|     """ | ||||
|     # First try parsing GITHUB_REF (fastest) | ||||
|     github_ref = os.environ.get("GITHUB_REF", "") | ||||
|     if "/pull/" in github_ref: | ||||
|         return github_ref.split("/pull/")[1].split("/")[0] | ||||
|  | ||||
|     # Fallback to GitHub event file | ||||
|     github_event_path = os.environ.get("GITHUB_EVENT_PATH") | ||||
|     if github_event_path and os.path.exists(github_event_path): | ||||
|         with open(github_event_path) as f: | ||||
|             event_data = json.load(f) | ||||
|             pr_data = event_data.get("pull_request", {}) | ||||
|             if pr_number := pr_data.get("number"): | ||||
|                 return str(pr_number) | ||||
|  | ||||
|     return None | ||||
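|  | ||||
| # Editorial note: illustrative behaviour of the helper above (the PR number is made up): | ||||
| #   GITHUB_REF="refs/pull/1234/merge"  -> "1234" | ||||
| #   GITHUB_REF="refs/heads/dev"        -> falls through to the event file, else None | ||||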
|  | ||||
|  | ||||
| @cache | ||||
| def _get_changed_files_github_actions() -> list[str] | None: | ||||
|     """Get changed files in GitHub Actions environment. | ||||
|  | ||||
|     Returns: | ||||
|         List of changed files, or None if should fall back to git method | ||||
|     """ | ||||
|     event_name = os.environ.get("GITHUB_EVENT_NAME") | ||||
|  | ||||
|     # For pull requests | ||||
|     if event_name == "pull_request": | ||||
|         pr_number = _get_pr_number_from_github_env() | ||||
|         if pr_number: | ||||
|             # Use GitHub CLI to get changed files directly | ||||
|             cmd = ["gh", "pr", "diff", pr_number, "--name-only"] | ||||
|             return _get_changed_files_from_command(cmd) | ||||
|  | ||||
|     # For pushes (including squash-and-merge) | ||||
|     elif event_name == "push": | ||||
|         # For push events, we want to check what changed in this commit | ||||
|         try: | ||||
|             # Get the changed files in the last commit | ||||
|             return _get_changed_files_from_command( | ||||
|                 ["git", "diff", "HEAD~1..HEAD", "--name-only"] | ||||
|             ) | ||||
|         except:  # noqa: E722 | ||||
|             # Fall back to the original method if this fails | ||||
|             pass | ||||
|  | ||||
|     return None | ||||
|  | ||||
|  | ||||
| def changed_files(branch: str | None = None) -> list[str]: | ||||
|     # In GitHub Actions, we can use the API to get changed files more efficiently | ||||
|     if os.environ.get("GITHUB_ACTIONS") == "true": | ||||
|         github_files = _get_changed_files_github_actions() | ||||
|         if github_files is not None: | ||||
|             return github_files | ||||
|  | ||||
|     # Original implementation for local development | ||||
|     if not branch:  # Treat None and empty string the same | ||||
|         branch = "dev" | ||||
|     check_remotes = ["upstream", "origin"] | ||||
|     check_remotes.extend(splitlines_no_ends(get_output("git", "remote"))) | ||||
|     for remote in check_remotes: | ||||
| @@ -83,25 +180,165 @@ def changed_files(branch="dev"): | ||||
|             pass | ||||
|     else: | ||||
|         raise ValueError("Git not configured") | ||||
|     command = ["git", "diff", merge_base, "--name-only"] | ||||
|     changed = splitlines_no_ends(get_output(*command)) | ||||
|     changed = [os.path.relpath(f, os.getcwd()) for f in changed] | ||||
|     changed.sort() | ||||
|     return changed | ||||
|     return _get_changed_files_from_command(["git", "diff", merge_base, "--name-only"]) | ||||
|  | ||||
|  | ||||
| def filter_changed(files): | ||||
| def _get_changed_files_from_command(command: list[str]) -> list[str]: | ||||
|     """Run a git command to get changed files and return them as a list.""" | ||||
|     proc = subprocess.run(command, capture_output=True, text=True, check=False) | ||||
|     if proc.returncode != 0: | ||||
|         raise Exception(f"Command failed: {' '.join(command)}\nstderr: {proc.stderr}") | ||||
|  | ||||
|     changed_files = splitlines_no_ends(proc.stdout) | ||||
|     changed_files = [os.path.relpath(f, os.getcwd()) for f in changed_files if f] | ||||
|     changed_files.sort() | ||||
|     return changed_files | ||||
|  | ||||
|  | ||||
| def get_changed_components() -> list[str] | None: | ||||
|     """Get list of changed components using list-components.py script. | ||||
|  | ||||
|     This function: | ||||
|     1. First checks if any core C++/header files (esphome/core/*.{cpp,h,hpp,cc,cxx,c}) changed - if so, returns None | ||||
|     2. Otherwise delegates to ./script/list-components.py --changed which: | ||||
|        - Analyzes all changed files | ||||
|        - Determines which components are affected (including dependencies) | ||||
|        - Returns a list of component names that need to be checked | ||||
|  | ||||
|     Returns: | ||||
|         - None: Core C++/header files changed, need full scan | ||||
|         - Empty list: No components changed (only non-component files changed) | ||||
|         - List of strings: Names of components that need checking (e.g., ["wifi", "mqtt"]) | ||||
|     """ | ||||
|     # Check if any core C++ or header files changed first | ||||
|     changed = changed_files() | ||||
|     files = [f for f in files if f in changed] | ||||
|     print("Changed files:") | ||||
|     if not files: | ||||
|         print("    No changed files!") | ||||
|     for c in files: | ||||
|         print(f"    {c}") | ||||
|     core_cpp_changed = any( | ||||
|         f.startswith("esphome/core/") | ||||
|         and f.endswith(CPP_FILE_EXTENSIONS[:-1])  # Exclude .tcc for core files | ||||
|         for f in changed | ||||
|     ) | ||||
|     if core_cpp_changed: | ||||
|         print("Core C++/header files changed - will run full clang-tidy scan") | ||||
|         return None | ||||
|  | ||||
|     # Use list-components.py to get changed components | ||||
|     script_path = os.path.join(root_path, "script", "list-components.py") | ||||
|     cmd = [script_path, "--changed"] | ||||
|  | ||||
|     try: | ||||
|         result = subprocess.run( | ||||
|             cmd, capture_output=True, text=True, check=True, close_fds=False | ||||
|         ) | ||||
|         return parse_list_components_output(result.stdout) | ||||
|     except subprocess.CalledProcessError: | ||||
|         # If the script fails, fall back to full scan | ||||
|         print("Could not determine changed components - will run full clang-tidy scan") | ||||
|         return None | ||||
|  | ||||
|  | ||||
| def _filter_changed_ci(files: list[str]) -> list[str]: | ||||
|     """Filter files based on changed components in CI environment. | ||||
|  | ||||
|     This function implements intelligent filtering to reduce CI runtime by only | ||||
|     checking files that could be affected by the changes. It handles three scenarios: | ||||
|  | ||||
|     1. Core C++/header files changed (returns None from get_changed_components): | ||||
|        - Triggered when any C++/header file in esphome/core/ is modified | ||||
|        - Action: Check ALL files (full scan) | ||||
|        - Reason: Core C++/header files are used throughout the codebase | ||||
|  | ||||
|     2. No components changed (returns empty list from get_changed_components): | ||||
|        - Triggered when only non-component files changed (e.g., scripts, configs) | ||||
|        - Action: Check only the specific non-component files that changed | ||||
|        - Example: If only script/clang-tidy changed, only check that file | ||||
|  | ||||
|     3. Specific components changed (returns list of component names): | ||||
|        - Component detection done by: ./script/list-components.py --changed | ||||
|        - That script analyzes which components are affected by the changed files | ||||
|          INCLUDING their dependencies | ||||
|        - Action: Check ALL files in each component that list-components.py identifies | ||||
|        - Example: If wifi.cpp changed, list-components.py might return ["wifi", "network"] | ||||
|                  if network depends on wifi. We then check ALL files in both | ||||
|                  esphome/components/wifi/ and esphome/components/network/ | ||||
|        - Reason: Component files often have interdependencies (headers, base classes) | ||||
|  | ||||
|     Args: | ||||
|         files: List of all files that clang-tidy would normally check | ||||
|  | ||||
|     Returns: | ||||
|         Filtered list of files to check | ||||
|     """ | ||||
|     components = get_changed_components() | ||||
|     if components is None: | ||||
|         # Scenario 1: Core files changed or couldn't determine components | ||||
|         # Action: Return all files for full scan | ||||
|         return files | ||||
|  | ||||
|     if not components: | ||||
|         # Scenario 2: No components changed - only non-component files changed | ||||
|         # Action: Check only the specific non-component files that changed | ||||
|         changed = changed_files() | ||||
|         files = [ | ||||
|             f | ||||
|             for f in files | ||||
|             if f in changed and not f.startswith(ESPHOME_COMPONENTS_PATH) | ||||
|         ] | ||||
|         if not files: | ||||
|             print("No files changed") | ||||
|         return files | ||||
|  | ||||
|     # Scenario 3: Specific components changed | ||||
|     # Action: Check ALL files in each changed component | ||||
|     # Convert component list to set for O(1) lookups | ||||
|     component_set = set(components) | ||||
|     print(f"Changed components: {', '.join(sorted(components))}") | ||||
|  | ||||
|     # The 'files' parameter contains ALL files in the codebase that clang-tidy would check. | ||||
|     # We filter this down to only files in the changed components. | ||||
|     # We check ALL files in each changed component (not just the changed files) | ||||
|     # because changes in one file can affect other files in the same component. | ||||
|     filtered_files = [] | ||||
|     for f in files: | ||||
|         if f.startswith(ESPHOME_COMPONENTS_PATH): | ||||
|             # Check if file belongs to any of the changed components | ||||
|             parts = f.split("/") | ||||
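|             # Paths look like esphome/components/<component>/<file>, so parts[2] is the component name | ||||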
|             if len(parts) >= 3 and parts[2] in component_set: | ||||
|                 filtered_files.append(f) | ||||
|  | ||||
|     return filtered_files | ||||
|  | ||||
|  | ||||
| def _filter_changed_local(files: list[str]) -> list[str]: | ||||
|     """Filter files based on git changes for local development. | ||||
|  | ||||
|     Args: | ||||
|         files: List of all files to filter | ||||
|  | ||||
|     Returns: | ||||
|         Filtered list of files to check | ||||
|     """ | ||||
|     # For local development, just check changed files directly | ||||
|     changed = changed_files() | ||||
|     return [f for f in files if f in changed] | ||||
|  | ||||
|  | ||||
| def filter_changed(files: list[str]) -> list[str]: | ||||
|     """Filter files to only those that changed or are in changed components. | ||||
|  | ||||
|     Args: | ||||
|         files: List of files to filter | ||||
|     """ | ||||
|     # When running from CI, use component-based filtering | ||||
|     if os.environ.get("GITHUB_ACTIONS") == "true": | ||||
|         files = _filter_changed_ci(files) | ||||
|     else: | ||||
|         files = _filter_changed_local(files) | ||||
|  | ||||
|     print_file_list(files, "Files to check after filtering:") | ||||
|     return files | ||||
|  | ||||
|  | ||||
| def filter_grep(files, value): | ||||
| def filter_grep(files: list[str], value: str) -> list[str]: | ||||
|     matched = [] | ||||
|     for file in files: | ||||
|         with open(file, encoding="utf-8") as handle: | ||||
| @@ -111,7 +348,7 @@ def filter_grep(files, value): | ||||
|     return matched | ||||
|  | ||||
|  | ||||
| def git_ls_files(patterns=None): | ||||
| def git_ls_files(patterns: list[str] | None = None) -> dict[str, int]: | ||||
|     command = ["git", "ls-files", "-s"] | ||||
|     if patterns is not None: | ||||
|         command.extend(patterns) | ||||
| @@ -121,7 +358,10 @@ def git_ls_files(patterns=None): | ||||
|     return {s[3].strip(): int(s[0]) for s in lines} | ||||
|  | ||||
|  | ||||
| def load_idedata(environment): | ||||
| def load_idedata(environment: str) -> dict[str, Any]: | ||||
|     start_time = time.time() | ||||
|     print(f"Loading IDE data for environment '{environment}'...") | ||||
|  | ||||
|     platformio_ini = Path(root_path) / "platformio.ini" | ||||
|     temp_idedata = Path(temp_folder) / f"idedata-{environment}.json" | ||||
|     changed = False | ||||
| @@ -142,7 +382,10 @@ def load_idedata(environment): | ||||
|             changed = True | ||||
|  | ||||
|     if not changed: | ||||
|         return json.loads(temp_idedata.read_text()) | ||||
|         data = json.loads(temp_idedata.read_text()) | ||||
|         elapsed = time.time() - start_time | ||||
|         print(f"IDE data loaded from cache in {elapsed:.2f} seconds") | ||||
|         return data | ||||
|  | ||||
|     # ensure temp directory exists before running pio, as it writes sdkconfig to it | ||||
|     Path(temp_folder).mkdir(exist_ok=True) | ||||
| @@ -158,6 +401,9 @@ def load_idedata(environment): | ||||
|         match = re.search(r'{\s*".*}', stdout.decode("utf-8")) | ||||
|         data = json.loads(match.group()) | ||||
|     temp_idedata.write_text(json.dumps(data, indent=2) + "\n") | ||||
|  | ||||
|     elapsed = time.time() - start_time | ||||
|     print(f"IDE data generated and cached in {elapsed:.2f} seconds") | ||||
|     return data | ||||
|  | ||||
|  | ||||
| @@ -196,6 +442,29 @@ def get_binary(name: str, version: str) -> str: | ||||
|         raise | ||||
|  | ||||
|  | ||||
| def print_file_list( | ||||
|     files: list[str], title: str = "Files:", max_files: int = 20 | ||||
| ) -> None: | ||||
|     """Print a list of files with optional truncation for large lists. | ||||
|  | ||||
|     Args: | ||||
|         files: List of file paths to print | ||||
|         title: Title to print before the list | ||||
|         max_files: Maximum number of files to show before truncating (default: 20) | ||||
|     """ | ||||
|     print(title) | ||||
|     if not files: | ||||
|         print("    No files to check!") | ||||
|     elif len(files) <= max_files: | ||||
|         for f in sorted(files): | ||||
|             print(f"    {f}") | ||||
|     else: | ||||
|         sorted_files = sorted(files) | ||||
|         for f in sorted_files[:10]: | ||||
|             print(f"    {f}") | ||||
|         print(f"    ... and {len(files) - 10} more files") | ||||
|  | ||||
|  | ||||
| def get_usable_cpu_count() -> int: | ||||
|     """Return the number of CPUs that can be used for processes. | ||||
|  | ||||
| @@ -205,3 +474,83 @@ def get_usable_cpu_count() -> int: | ||||
|     return ( | ||||
|         os.process_cpu_count() if hasattr(os, "process_cpu_count") else os.cpu_count() | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def get_all_dependencies(component_names: set[str]) -> set[str]: | ||||
|     """Get all dependencies for a set of components. | ||||
|  | ||||
|     Args: | ||||
|         component_names: Set of component names to get dependencies for | ||||
|  | ||||
|     Returns: | ||||
|         Set of all components including dependencies and auto-loaded components | ||||
|     """ | ||||
|     from esphome.const import KEY_CORE | ||||
|     from esphome.core import CORE | ||||
|     from esphome.loader import get_component | ||||
|  | ||||
|     all_components: set[str] = set(component_names) | ||||
|  | ||||
|     # Reset CORE to ensure clean state | ||||
|     CORE.reset() | ||||
|  | ||||
|     # Set up fake config path for component loading | ||||
|     root = Path(__file__).parent.parent | ||||
|     CORE.config_path = str(root) | ||||
|     CORE.data[KEY_CORE] = {} | ||||
|  | ||||
|     # Keep finding dependencies until no new ones are found | ||||
|     while True: | ||||
|         new_components: set[str] = set() | ||||
|  | ||||
|         for comp_name in all_components: | ||||
|             comp = get_component(comp_name) | ||||
|             if not comp: | ||||
|                 continue | ||||
|  | ||||
|             # Add dependencies (extract component name before '.') | ||||
|             new_components.update(dep.split(".")[0] for dep in comp.dependencies) | ||||
|  | ||||
|             # Add auto_load components | ||||
|             new_components.update(comp.auto_load) | ||||
|  | ||||
|         # Check if we found any new components | ||||
|         new_components -= all_components | ||||
|         if not new_components: | ||||
|             break | ||||
|  | ||||
|         all_components.update(new_components) | ||||
|  | ||||
|     return all_components | ||||
|  | ||||
|  | ||||
| def get_components_from_integration_fixtures() -> set[str]: | ||||
|     """Extract all components used in integration test fixtures. | ||||
|  | ||||
|     Returns: | ||||
|         Set of component names used in integration test fixtures | ||||
|     """ | ||||
|     import yaml | ||||
|  | ||||
|     components: set[str] = set() | ||||
|     fixtures_dir = Path(__file__).parent.parent / "tests" / "integration" / "fixtures" | ||||
|  | ||||
|     for yaml_file in fixtures_dir.glob("*.yaml"): | ||||
|         with open(yaml_file) as f: | ||||
|             config: dict[str, Any] | None = yaml.safe_load(f) | ||||
|             if not config: | ||||
|                 continue | ||||
|  | ||||
|             # Add all top-level component keys | ||||
|             components.update(config.keys()) | ||||
|  | ||||
|             # Add platform components (e.g., output.template) | ||||
|             for value in config.values(): | ||||
|                 if not isinstance(value, list): | ||||
|                     continue | ||||
|  | ||||
|                 for item in value: | ||||
|                     if isinstance(item, dict) and "platform" in item: | ||||
|                         components.add(item["platform"]) | ||||
|  | ||||
|     return components | ||||
|   | ||||
| @@ -20,6 +20,12 @@ def filter_component_files(str): | ||||
|     return str.startswith("esphome/components/") | str.startswith("tests/components/") | ||||
|  | ||||
|  | ||||
| def get_all_component_files() -> list[str]: | ||||
|     """Get all component files from git.""" | ||||
|     files = git_ls_files() | ||||
|     return list(filter(filter_component_files, files)) | ||||
|  | ||||
|  | ||||
| def extract_component_names_array_from_files_array(files): | ||||
|     components = [] | ||||
|     for file in files: | ||||
| @@ -165,17 +171,20 @@ def main(): | ||||
|     if args.branch and not args.changed: | ||||
|         parser.error("--branch requires --changed") | ||||
|  | ||||
|     files = git_ls_files() | ||||
|     files = filter(filter_component_files, files) | ||||
|  | ||||
|     if args.changed: | ||||
|         if args.branch: | ||||
|             changed = changed_files(args.branch) | ||||
|         else: | ||||
|             changed = changed_files() | ||||
|         # When --changed is passed, only get the changed files | ||||
|         changed = changed_files(args.branch) | ||||
|  | ||||
|         # If any base test file(s) changed, there's no need to filter out components | ||||
|         if not any("tests/test_build_components" in file for file in changed): | ||||
|             files = [f for f in files if f in changed] | ||||
|         if any("tests/test_build_components" in file for file in changed): | ||||
|             # Need to get all component files | ||||
|             files = get_all_component_files() | ||||
|         else: | ||||
|             # Only look at changed component files | ||||
|             files = [f for f in changed if filter_component_files(f)] | ||||
|     else: | ||||
|         # Get all component files | ||||
|         files = get_all_component_files() | ||||
|  | ||||
|     for c in get_components(files, args.changed): | ||||
|         print(c) | ||||
|   | ||||
| @@ -1,29 +1,71 @@ | ||||
| """Fixtures for component tests.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from collections.abc import Callable, Generator | ||||
| from pathlib import Path | ||||
| import sys | ||||
|  | ||||
| import pytest | ||||
|  | ||||
| # Add package root to python path | ||||
| here = Path(__file__).parent | ||||
| package_root = here.parent.parent | ||||
| sys.path.insert(0, package_root.as_posix()) | ||||
|  | ||||
| import pytest  # noqa: E402 | ||||
|  | ||||
| from esphome.__main__ import generate_cpp_contents  # noqa: E402 | ||||
| from esphome.config import read_config  # noqa: E402 | ||||
| from esphome.core import CORE  # noqa: E402 | ||||
|  | ||||
|  | ||||
| @pytest.fixture(autouse=True) | ||||
| def config_path(request: pytest.FixtureRequest) -> Generator[None]: | ||||
|     """Set CORE.config_path to the component's config directory and reset it after the test.""" | ||||
|     original_path = CORE.config_path | ||||
|     config_dir = Path(request.fspath).parent / "config" | ||||
|  | ||||
|     # Check if config directory exists, if not use parent directory | ||||
|     if config_dir.exists(): | ||||
|         # Set config_path to a dummy yaml file in the config directory | ||||
|         # This ensures CORE.config_dir points to the config directory | ||||
|         CORE.config_path = str(config_dir / "dummy.yaml") | ||||
|     else: | ||||
|         CORE.config_path = str(Path(request.fspath).parent / "dummy.yaml") | ||||
|  | ||||
|     yield | ||||
|     CORE.config_path = original_path | ||||
|  | ||||
|  | ||||
| @pytest.fixture | ||||
| def generate_main(): | ||||
| def component_fixture_path(request: pytest.FixtureRequest) -> Callable[[str], Path]: | ||||
|     """Return a function to get absolute paths relative to the component's fixtures directory.""" | ||||
|  | ||||
|     def _get_path(file_name: str) -> Path: | ||||
|         """Get the absolute path of a file relative to the component's fixtures directory.""" | ||||
|         return (Path(request.fspath).parent / "fixtures" / file_name).absolute() | ||||
|  | ||||
|     return _get_path | ||||
|  | ||||
|  | ||||
| @pytest.fixture | ||||
| def component_config_path(request: pytest.FixtureRequest) -> Callable[[str], Path]: | ||||
|     """Return a function to get absolute paths relative to the component's config directory.""" | ||||
|  | ||||
|     def _get_path(file_name: str) -> Path: | ||||
|         """Get the absolute path of a file relative to the component's config directory.""" | ||||
|         return (Path(request.fspath).parent / "config" / file_name).absolute() | ||||
|  | ||||
|     return _get_path | ||||
|  | ||||
|  | ||||
| @pytest.fixture | ||||
| def generate_main() -> Generator[Callable[[str | Path], str]]: | ||||
|     """Generates the C++ main.cpp file and returns it in string form.""" | ||||
|  | ||||
|     def generator(path: str) -> str: | ||||
|         CORE.config_path = path | ||||
|     def generator(path: str | Path) -> str: | ||||
|         CORE.config_path = str(path) | ||||
|         CORE.config = read_config({}) | ||||
|         generate_cpp_contents(CORE.config) | ||||
|         print(CORE.cpp_main_section) | ||||
|         return CORE.cpp_main_section | ||||
|  | ||||
|     yield generator | ||||
|   | ||||
 tests/component_tests/image/config/bad.png | 0 (new file)
 tests/component_tests/image/config/image.png | Bin 685 B (new file)
 tests/component_tests/image/config/image_test.yaml | 20 (new file)
							| @@ -0,0 +1,20 @@ | ||||
| esphome: | ||||
|   name: test | ||||
|  | ||||
| esp32: | ||||
|   board: esp32s3box | ||||
|  | ||||
| image: | ||||
|   - file: image.png | ||||
|     byte_order: little_endian | ||||
|     id: cat_img | ||||
|     type: rgb565 | ||||
|  | ||||
| spi: | ||||
|   mosi_pin: 6 | ||||
|   clk_pin: 7 | ||||
|  | ||||
| display: | ||||
|   - platform: mipi_spi | ||||
|     id: lcd_display | ||||
|     model: s3box | ||||
 tests/component_tests/image/test_init.py | 183 (new file)
							| @@ -0,0 +1,183 @@ | ||||
| """Tests for image configuration validation.""" | ||||
|  | ||||
| from __future__ import annotations | ||||
|  | ||||
| from collections.abc import Callable | ||||
| from pathlib import Path | ||||
| from typing import Any | ||||
|  | ||||
| import pytest | ||||
|  | ||||
| from esphome import config_validation as cv | ||||
| from esphome.components.image import CONFIG_SCHEMA | ||||
|  | ||||
|  | ||||
| @pytest.mark.parametrize( | ||||
|     ("config", "error_match"), | ||||
|     [ | ||||
|         pytest.param( | ||||
|             "a string", | ||||
|             "Badly formed image configuration, expected a list or a dictionary", | ||||
|             id="invalid_string_config", | ||||
|         ), | ||||
|         pytest.param( | ||||
|             {"id": "image_id", "type": "rgb565"}, | ||||
|             r"required key not provided @ data\[0\]\['file'\]", | ||||
|             id="missing_file", | ||||
|         ), | ||||
|         pytest.param( | ||||
|             {"file": "image.png", "type": "rgb565"}, | ||||
|             r"required key not provided @ data\[0\]\['id'\]", | ||||
|             id="missing_id", | ||||
|         ), | ||||
|         pytest.param( | ||||
|             {"id": "mdi_id", "file": "mdi:weather-##", "type": "rgb565"}, | ||||
|             "Could not parse mdi icon name", | ||||
|             id="invalid_mdi_icon", | ||||
|         ), | ||||
|         pytest.param( | ||||
|             { | ||||
|                 "id": "image_id", | ||||
|                 "file": "image.png", | ||||
|                 "type": "binary", | ||||
|                 "transparency": "alpha_channel", | ||||
|             }, | ||||
|             "Image format 'BINARY' cannot have transparency", | ||||
|             id="binary_with_transparency", | ||||
|         ), | ||||
|         pytest.param( | ||||
|             { | ||||
|                 "id": "image_id", | ||||
|                 "file": "image.png", | ||||
|                 "type": "rgb565", | ||||
|                 "transparency": "chroma_key", | ||||
|                 "invert_alpha": True, | ||||
|             }, | ||||
|             "No alpha channel to invert", | ||||
|             id="invert_alpha_without_alpha_channel", | ||||
|         ), | ||||
|         pytest.param( | ||||
|             { | ||||
|                 "id": "image_id", | ||||
|                 "file": "image.png", | ||||
|                 "type": "binary", | ||||
|                 "byte_order": "big_endian", | ||||
|             }, | ||||
|             "Image format 'BINARY' does not support byte order configuration", | ||||
|             id="binary_with_byte_order", | ||||
|         ), | ||||
|         pytest.param( | ||||
|             {"id": "image_id", "file": "bad.png", "type": "binary"}, | ||||
|             "File can't be opened as image", | ||||
|             id="invalid_image_file", | ||||
|         ), | ||||
|         pytest.param( | ||||
|             {"defaults": {}, "images": [{"id": "image_id", "file": "image.png"}]}, | ||||
|             "Type is required either in the image config or in the defaults", | ||||
|             id="missing_type_in_defaults", | ||||
|         ), | ||||
|     ], | ||||
| ) | ||||
| def test_image_configuration_errors( | ||||
|     config: Any, | ||||
|     error_match: str, | ||||
| ) -> None: | ||||
|     """Test detection of invalid configuration.""" | ||||
|     with pytest.raises(cv.Invalid, match=error_match): | ||||
|         CONFIG_SCHEMA(config) | ||||
|  | ||||
|  | ||||
| @pytest.mark.parametrize( | ||||
|     "config", | ||||
|     [ | ||||
|         pytest.param( | ||||
|             { | ||||
|                 "id": "image_id", | ||||
|                 "file": "image.png", | ||||
|                 "type": "rgb565", | ||||
|                 "transparency": "chroma_key", | ||||
|                 "byte_order": "little_endian", | ||||
|                 "dither": "FloydSteinberg", | ||||
|                 "resize": "100x100", | ||||
|                 "invert_alpha": False, | ||||
|             }, | ||||
|             id="single_image_all_options", | ||||
|         ), | ||||
|         pytest.param( | ||||
|             [ | ||||
|                 { | ||||
|                     "id": "image_id", | ||||
|                     "file": "image.png", | ||||
|                     "type": "binary", | ||||
|                 } | ||||
|             ], | ||||
|             id="list_of_images", | ||||
|         ), | ||||
|         pytest.param( | ||||
|             { | ||||
|                 "defaults": { | ||||
|                     "type": "rgb565", | ||||
|                     "transparency": "chroma_key", | ||||
|                     "byte_order": "little_endian", | ||||
|                     "dither": "FloydSteinberg", | ||||
|                     "resize": "100x100", | ||||
|                     "invert_alpha": False, | ||||
|                 }, | ||||
|                 "images": [ | ||||
|                     { | ||||
|                         "id": "image_id", | ||||
|                         "file": "image.png", | ||||
|                     } | ||||
|                 ], | ||||
|             }, | ||||
|             id="images_with_defaults", | ||||
|         ), | ||||
|         pytest.param( | ||||
|             { | ||||
|                 "rgb565": { | ||||
|                     "alpha_channel": [ | ||||
|                         { | ||||
|                             "id": "image_id", | ||||
|                             "file": "image.png", | ||||
|                             "transparency": "alpha_channel", | ||||
|                             "byte_order": "little_endian", | ||||
|                             "dither": "FloydSteinberg", | ||||
|                             "resize": "100x100", | ||||
|                             "invert_alpha": False, | ||||
|                         } | ||||
|                     ] | ||||
|                 }, | ||||
|                 "binary": [ | ||||
|                     { | ||||
|                         "id": "image_id", | ||||
|                         "file": "image.png", | ||||
|                         "transparency": "opaque", | ||||
|                         "dither": "FloydSteinberg", | ||||
|                         "resize": "100x100", | ||||
|                         "invert_alpha": False, | ||||
|                     } | ||||
|                 ], | ||||
|             }, | ||||
|             id="type_based_organization", | ||||
|         ), | ||||
|     ], | ||||
| ) | ||||
| def test_image_configuration_success( | ||||
|     config: dict[str, Any] | list[dict[str, Any]], | ||||
| ) -> None: | ||||
|     """Test successful configuration validation.""" | ||||
|     CONFIG_SCHEMA(config) | ||||
|  | ||||
|  | ||||
| def test_image_generation( | ||||
|     generate_main: Callable[[str | Path], str], | ||||
|     component_config_path: Callable[[str], Path], | ||||
| ) -> None: | ||||
|     """Test image generation configuration.""" | ||||
|  | ||||
|     main_cpp = generate_main(component_config_path("image_test.yaml")) | ||||
|     assert "uint8_t_id[] PROGMEM = {0x24, 0x21, 0x24, 0x21" in main_cpp | ||||
|     assert ( | ||||
|         "cat_img = new image::Image(uint8_t_id, 32, 24, image::IMAGE_TYPE_RGB565, image::TRANSPARENCY_OPAQUE);" | ||||
|         in main_cpp | ||||
|     ) | ||||
 tests/components/gl_r01_i2c/common.yaml | 12 (new file)
							| @@ -0,0 +1,12 @@ | ||||
| i2c: | ||||
|   - id: i2c_gl_r01_i2c | ||||
|     scl: ${scl_pin} | ||||
|     sda: ${sda_pin} | ||||
|  | ||||
| sensor: | ||||
|   - platform: gl_r01_i2c | ||||
|     id: tof | ||||
|     name: "ToF sensor" | ||||
|     i2c_id: i2c_gl_r01_i2c | ||||
|     address: 0x74 | ||||
|     update_interval: 15s | ||||
 tests/components/gl_r01_i2c/test.esp32-ard.yaml | 5 (new file)
							| @@ -0,0 +1,5 @@ | ||||
| substitutions: | ||||
|   scl_pin: GPIO16 | ||||
|   sda_pin: GPIO17 | ||||
|  | ||||
| <<: !include common.yaml | ||||
 tests/components/gl_r01_i2c/test.esp32-c3-ard.yaml | 5 (new file)
							| @@ -0,0 +1,5 @@ | ||||
| substitutions: | ||||
|   scl_pin: GPIO5 | ||||
|   sda_pin: GPIO4 | ||||
|  | ||||
| <<: !include common.yaml | ||||
 tests/components/gl_r01_i2c/test.esp32-c3-idf.yaml | 5 (new file)
							| @@ -0,0 +1,5 @@ | ||||
| substitutions: | ||||
|   scl_pin: GPIO5 | ||||
|   sda_pin: GPIO4 | ||||
|  | ||||
| <<: !include common.yaml | ||||
 tests/components/gl_r01_i2c/test.esp32-idf.yaml | 5 (new file)
							| @@ -0,0 +1,5 @@ | ||||
| substitutions: | ||||
|   scl_pin: GPIO16 | ||||
|   sda_pin: GPIO17 | ||||
|  | ||||
| <<: !include common.yaml | ||||
 tests/components/gl_r01_i2c/test.esp8266-ard.yaml | 5 (new file)
							| @@ -0,0 +1,5 @@ | ||||
| substitutions: | ||||
|   scl_pin: GPIO5 | ||||
|   sda_pin: GPIO4 | ||||
|  | ||||
| <<: !include common.yaml | ||||
 tests/components/gl_r01_i2c/test.rp2040-ard.yaml | 5 (new file)
							| @@ -0,0 +1,5 @@ | ||||
| substitutions: | ||||
|   scl_pin: GPIO5 | ||||
|   sda_pin: GPIO4 | ||||
|  | ||||
| <<: !include common.yaml | ||||
| @@ -1,17 +0,0 @@ | ||||
| spi: | ||||
|   - id: spi_main_lcd | ||||
|     clk_pin: 16 | ||||
|     mosi_pin: 17 | ||||
|     miso_pin: 32 | ||||
|  | ||||
| display: | ||||
|   - platform: ili9xxx | ||||
|     id: main_lcd | ||||
|     model: ili9342 | ||||
|     cs_pin: 14 | ||||
|     dc_pin: 13 | ||||
|     reset_pin: 21 | ||||
|     invert_colors: true | ||||
|  | ||||
| <<: !include common.yaml | ||||
|  | ||||
| @@ -1,16 +0,0 @@ | ||||
| spi: | ||||
|   - id: spi_main_lcd | ||||
|     clk_pin: 6 | ||||
|     mosi_pin: 7 | ||||
|     miso_pin: 5 | ||||
|  | ||||
| display: | ||||
|   - platform: ili9xxx | ||||
|     id: main_lcd | ||||
|     model: ili9342 | ||||
|     cs_pin: 3 | ||||
|     dc_pin: 11 | ||||
|     reset_pin: 10 | ||||
|     invert_colors: true | ||||
|  | ||||
| <<: !include common.yaml | ||||
| @@ -1,16 +0,0 @@ | ||||
| spi: | ||||
|   - id: spi_main_lcd | ||||
|     clk_pin: 6 | ||||
|     mosi_pin: 7 | ||||
|     miso_pin: 5 | ||||
|  | ||||
| display: | ||||
|   - platform: ili9xxx | ||||
|     id: main_lcd | ||||
|     model: ili9342 | ||||
|     cs_pin: 3 | ||||
|     dc_pin: 11 | ||||
|     reset_pin: 10 | ||||
|     invert_colors: true | ||||
|  | ||||
| <<: !include common.yaml | ||||
| @@ -13,4 +13,13 @@ display: | ||||
|     reset_pin: 16 | ||||
|     invert_colors: true | ||||
|  | ||||
| <<: !include common.yaml | ||||
| image: | ||||
|   defaults: | ||||
|     type: rgb565 | ||||
|     transparency: opaque | ||||
|     byte_order: little_endian | ||||
|     resize: 50x50 | ||||
|     dither: FloydSteinberg | ||||
|   images: | ||||
|     - id: test_image | ||||
|       file: ../../pnglogo.png | ||||
|   | ||||
 tests/components/lps22/common.yaml | 8 (new file)
							| @@ -0,0 +1,8 @@ | ||||
| sensor: | ||||
|   - platform: lps22 | ||||
|     address: 0x5d | ||||
|     update_interval: 10s | ||||
|     temperature: | ||||
|       name: "LPS22 Temperature" | ||||
|     pressure: | ||||
|       name: "LPS22 Pressure" | ||||
 tests/components/lps22/test.esp32-ard.yaml | 6 (new file)
							| @@ -0,0 +1,6 @@ | ||||
| i2c: | ||||
|   - id: i2c_lps22 | ||||
|     scl: 16 | ||||
|     sda: 17 | ||||
|  | ||||
| <<: !include common.yaml | ||||
 tests/components/lps22/test.esp32-c3-ard.yaml | 6 (new file)
							| @@ -0,0 +1,6 @@ | ||||
| i2c: | ||||
|   - id: i2c_lps22 | ||||
|     scl: 5 | ||||
|     sda: 4 | ||||
|  | ||||
| <<: !include common.yaml | ||||
 tests/components/lps22/test.esp32-c3-idf.yaml | 6 (new file)
							| @@ -0,0 +1,6 @@ | ||||
| i2c: | ||||
|   - id: i2c_lps22 | ||||
|     scl: 5 | ||||
|     sda: 4 | ||||
|  | ||||
| <<: !include common.yaml | ||||
 tests/components/lps22/test.esp32-idf.yaml | 6 (new file)
							| @@ -0,0 +1,6 @@ | ||||
| i2c: | ||||
|   - id: i2c_lps22 | ||||
|     scl: 16 | ||||
|     sda: 17 | ||||
|  | ||||
| <<: !include common.yaml | ||||
 tests/components/lps22/test.esp8266-ard.yaml | 6 (new file)
							| @@ -0,0 +1,6 @@ | ||||
| i2c: | ||||
|   - id: i2c_lps22 | ||||
|     scl: 5 | ||||
|     sda: 4 | ||||
|  | ||||
| <<: !include common.yaml | ||||
 tests/components/lps22/test.rp2040-ard.yaml | 6 (new file)
							| @@ -0,0 +1,6 @@ | ||||
| i2c: | ||||
|   - id: i2c_lps22 | ||||
|     scl: 5 | ||||
|     sda: 4 | ||||
|  | ||||
| <<: !include common.yaml | ||||
| @@ -78,3 +78,268 @@ pytest -s tests/integration/test_host_mode_basic.py | ||||
| - Each test gets its own temporary directory and unique port | ||||
| - Port allocation minimizes race conditions by holding the socket until just before ESPHome starts | ||||
| - Output from ESPHome processes is displayed for debugging | ||||
|  | ||||
| ## Integration Test Writing Guide | ||||
|  | ||||
| ### Test Patterns and Best Practices | ||||
|  | ||||
| #### 1. Test File Naming Convention | ||||
| - Use descriptive names: `test_{category}_{feature}.py` | ||||
| - Common categories: `host_mode`, `api`, `scheduler`, `light`, `areas_and_devices` | ||||
| - Examples: | ||||
|   - `test_host_mode_basic.py` - Basic host mode functionality | ||||
|   - `test_api_message_batching.py` - API message batching | ||||
|   - `test_scheduler_stress.py` - Scheduler stress testing | ||||
|  | ||||
| #### 2. Essential Imports | ||||
| ```python | ||||
| from __future__ import annotations | ||||
|  | ||||
| import asyncio | ||||
| from typing import Any | ||||
|  | ||||
| import pytest | ||||
| from aioesphomeapi import EntityState, SensorState | ||||
|  | ||||
| from .types import APIClientConnectedFactory, RunCompiledFunction | ||||
| ``` | ||||
|  | ||||
| #### 3. Common Test Patterns | ||||
|  | ||||
| ##### Basic Entity Test | ||||
| ```python | ||||
| @pytest.mark.asyncio | ||||
| async def test_my_sensor( | ||||
|     yaml_config: str, | ||||
|     run_compiled: RunCompiledFunction, | ||||
|     api_client_connected: APIClientConnectedFactory, | ||||
| ) -> None: | ||||
|     """Test sensor functionality.""" | ||||
|     async with run_compiled(yaml_config), api_client_connected() as client: | ||||
|         # Get entity list | ||||
|         entities, services = await client.list_entities_services() | ||||
|  | ||||
|         # Find specific entity | ||||
|         sensor = next((e for e in entities if e.object_id == "my_sensor"), None) | ||||
|         assert sensor is not None | ||||
| ``` | ||||
|  | ||||
| ##### State Subscription Pattern | ||||
| ```python | ||||
| # Track state changes with futures | ||||
| loop = asyncio.get_running_loop() | ||||
| states: dict[int, EntityState] = {} | ||||
| state_future: asyncio.Future[EntityState] = loop.create_future() | ||||
|  | ||||
| def on_state(state: EntityState) -> None: | ||||
|     states[state.key] = state | ||||
|     # Check for specific condition using isinstance | ||||
|     if isinstance(state, SensorState) and state.state == expected_value: | ||||
|         if not state_future.done(): | ||||
|             state_future.set_result(state) | ||||
|  | ||||
| client.subscribe_states(on_state) | ||||
|  | ||||
| # Wait for state with timeout | ||||
| try: | ||||
|     result = await asyncio.wait_for(state_future, timeout=5.0) | ||||
| except asyncio.TimeoutError: | ||||
|     pytest.fail(f"Expected state not received. Got: {list(states.values())}") | ||||
| ``` | ||||
|  | ||||
| ##### Service Execution Pattern | ||||
| ```python | ||||
| # Find and execute service | ||||
| entities, services = await client.list_entities_services() | ||||
| my_service = next((s for s in services if s.name == "my_service"), None) | ||||
| assert my_service is not None | ||||
|  | ||||
| # Execute with parameters | ||||
| client.execute_service(my_service, {"param1": "value1", "param2": 42}) | ||||
| ``` | ||||
|  | ||||
| ##### Multiple Entity Tracking | ||||
| ```python | ||||
| # For tests with many entities | ||||
| loop = asyncio.get_running_loop() | ||||
| entity_count = 50 | ||||
| received_states: set[int] = set() | ||||
| all_states_future: asyncio.Future[bool] = loop.create_future() | ||||
|  | ||||
| def on_state(state: EntityState) -> None: | ||||
|     received_states.add(state.key) | ||||
|     if len(received_states) >= entity_count and not all_states_future.done(): | ||||
|         all_states_future.set_result(True) | ||||
|  | ||||
| client.subscribe_states(on_state) | ||||
| await asyncio.wait_for(all_states_future, timeout=10.0) | ||||
| ``` | ||||
|  | ||||
| #### 4. YAML Fixture Guidelines | ||||
|  | ||||
| ##### Naming Convention | ||||
| - Match test function name: `test_my_feature` → `fixtures/my_feature.yaml` | ||||
| - Note: Remove the `test_` prefix for the fixture filename (see the sketch below) | ||||
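|  | ||||
| A minimal sketch of this mapping (the helper below is hypothetical and shown only to illustrate the convention; the actual fixture lookup lives in the integration test conftest): | ||||
| ```python | ||||
| def fixture_name_for(test_function_name: str) -> str: | ||||
|     """Map a test function name to its YAML fixture filename.""" | ||||
|     # Hypothetical helper: test_host_mode_basic -> host_mode_basic.yaml | ||||
|     return test_function_name.removeprefix("test_") + ".yaml" | ||||
|  | ||||
| assert fixture_name_for("test_host_mode_basic") == "host_mode_basic.yaml" | ||||
| ``` | ||||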
|  | ||||
| ##### Basic Structure | ||||
| ```yaml | ||||
| esphome: | ||||
|   name: test-name  # Use kebab-case | ||||
|   # Optional: areas, devices, platformio_options | ||||
|  | ||||
| host:  # Always use host platform for integration tests | ||||
| api:   # Port injected automatically | ||||
| logger: | ||||
|   level: DEBUG  # Optional: Set log level | ||||
|  | ||||
| # Component configurations | ||||
| sensor: | ||||
|   - platform: template | ||||
|     name: "My Sensor" | ||||
|     id: my_sensor | ||||
|     lambda: return 42.0; | ||||
|     update_interval: 0.1s  # Fast updates for testing | ||||
| ``` | ||||
|  | ||||
| ##### Advanced Features | ||||
| ```yaml | ||||
| # External components for custom test code | ||||
| external_components: | ||||
|   - source: | ||||
|       type: local | ||||
|       path: EXTERNAL_COMPONENT_PATH  # Replaced by test framework | ||||
|     components: [my_test_component] | ||||
|  | ||||
| # Areas and devices | ||||
| esphome: | ||||
|   name: test-device | ||||
|   areas: | ||||
|     - id: living_room | ||||
|       name: "Living Room" | ||||
|     - id: kitchen | ||||
|       name: "Kitchen" | ||||
|       parent_id: living_room | ||||
|   devices: | ||||
|     - id: my_device | ||||
|       name: "Test Device" | ||||
|       area_id: living_room | ||||
|  | ||||
| # API services | ||||
| api: | ||||
|   services: | ||||
|     - service: test_service | ||||
|       variables: | ||||
|         my_param: string | ||||
|       then: | ||||
|         - logger.log: | ||||
|             format: "Service called with: %s" | ||||
|             args: [my_param.c_str()] | ||||
| ``` | ||||
|  | ||||
| #### 5. Testing Complex Scenarios | ||||
|  | ||||
| ##### External Components | ||||
| Create C++ components in `fixtures/external_components/` for: | ||||
| - Stress testing | ||||
| - Custom entity behaviors | ||||
| - Scheduler testing | ||||
| - Memory management tests | ||||
|  | ||||
| ##### Log Line Monitoring | ||||
| ```python | ||||
| log_lines: list[str] = [] | ||||
|  | ||||
| def on_log_line(line: str) -> None: | ||||
|     log_lines.append(line) | ||||
|     if "expected message" in line: | ||||
|         # Handle specific log messages (e.g. resolve a future or record a flag) | ||||
|         ... | ||||
|  | ||||
| async with run_compiled(yaml_config, line_callback=on_log_line): | ||||
|     # Test implementation | ||||
|     ... | ||||
| ``` | ||||
|  | ||||
| Example using futures for specific log patterns: | ||||
| ```python | ||||
| import re | ||||
|  | ||||
| loop = asyncio.get_running_loop() | ||||
| connected_future = loop.create_future() | ||||
| service_future = loop.create_future() | ||||
|  | ||||
| # Patterns to match | ||||
| connected_pattern = re.compile(r"Client .* connected from") | ||||
| service_pattern = re.compile(r"Service called") | ||||
|  | ||||
| def check_output(line: str) -> None: | ||||
|     """Check log output for expected messages.""" | ||||
|     if not connected_future.done() and connected_pattern.search(line): | ||||
|         connected_future.set_result(True) | ||||
|     elif not service_future.done() and service_pattern.search(line): | ||||
|         service_future.set_result(True) | ||||
|  | ||||
| async with run_compiled(yaml_config, line_callback=check_output): | ||||
|     async with api_client_connected() as client: | ||||
|         # Wait for specific log message | ||||
|         await asyncio.wait_for(connected_future, timeout=5.0) | ||||
|  | ||||
|         # Do test actions... | ||||
|  | ||||
|         # Wait for service log | ||||
|         await asyncio.wait_for(service_future, timeout=5.0) | ||||
| ``` | ||||
|  | ||||
| **Note**: Tests that monitor log messages typically have fewer race conditions than state-based tests, making them more reliable. However, be aware that the host platform currently does not have a thread-safe logger, so logging from threads will not work correctly. | ||||
|  | ||||
| ##### Timeout Handling | ||||
| ```python | ||||
| # Always use timeouts for async operations | ||||
| try: | ||||
|     result = await asyncio.wait_for(some_future, timeout=5.0) | ||||
| except asyncio.TimeoutError: | ||||
|     pytest.fail("Operation timed out - check test expectations") | ||||
| ``` | ||||
|  | ||||
| #### 6. Common Assertions | ||||
|  | ||||
| ```python | ||||
| # Device info | ||||
| assert device_info.name == "expected-name" | ||||
| assert device_info.compilation_time is not None | ||||
|  | ||||
| # Entity properties | ||||
| assert sensor.accuracy_decimals == 2 | ||||
| assert sensor.state_class == 1  # measurement | ||||
| assert sensor.force_update is True | ||||
|  | ||||
| # Service availability | ||||
| assert len(services) > 0 | ||||
| assert any(s.name == "expected_service" for s in services) | ||||
|  | ||||
| # State values | ||||
| assert state.state == expected_value | ||||
| assert state.missing_state is False | ||||
| ``` | ||||
|  | ||||
| #### 7. Debugging Tips | ||||
|  | ||||
| - Use `pytest -s` to see ESPHome output during tests | ||||
| - Add descriptive failure messages to assertions | ||||
| - Use `pytest.fail()` with detailed error info for timeouts | ||||
| - Check `log_lines` for compilation or runtime errors (see the sketch below) | ||||
| - Enable debug logging in YAML fixtures when needed | ||||
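|  | ||||
| A short sketch combining these tips, assuming `log_lines` is collected via the `line_callback` shown earlier and `state_future` is a pending future from the state-subscription pattern: | ||||
| ```python | ||||
| try: | ||||
|     await asyncio.wait_for(state_future, timeout=5.0) | ||||
| except asyncio.TimeoutError: | ||||
|     # Include recent ESPHome output in the failure message to speed up debugging | ||||
|     recent = "\n".join(log_lines[-20:]) | ||||
|     pytest.fail(f"Expected state not received within 5s.\nRecent log output:\n{recent}") | ||||
| ``` | ||||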
|  | ||||
| #### 8. Performance Considerations | ||||
|  | ||||
| - Use short update intervals (0.1s) for faster tests | ||||
| - Set reasonable timeouts (5-10s for most operations) | ||||
| - Batch multiple assertions when possible | ||||
| - Clean up resources properly using context managers | ||||
|  | ||||
| #### 9. Test Categories | ||||
|  | ||||
| - **Basic Tests**: Minimal functionality verification | ||||
| - **Entity Tests**: Sensor, switch, light behavior | ||||
| - **API Tests**: Message batching, services, events | ||||
| - **Scheduler Tests**: Timing, defer operations, stress | ||||
| - **Memory Tests**: Conditional compilation, optimization | ||||
| - **Integration Tests**: Areas, devices, complex interactions | ||||
|   | ||||
| @@ -5,12 +5,14 @@ from __future__ import annotations | ||||
| import asyncio | ||||
| from collections.abc import AsyncGenerator, Callable, Generator | ||||
| from contextlib import AbstractAsyncContextManager, asynccontextmanager | ||||
| import fcntl | ||||
| import logging | ||||
| import os | ||||
| from pathlib import Path | ||||
| import platform | ||||
| import signal | ||||
| import socket | ||||
| import subprocess | ||||
| import sys | ||||
| import tempfile | ||||
| from typing import TextIO | ||||
| @@ -50,6 +52,66 @@ if platform.system() == "Windows": | ||||
| import pty  # not available on Windows | ||||
|  | ||||
|  | ||||
| def _get_platformio_env(cache_dir: Path) -> dict[str, str]: | ||||
|     """Get environment variables for PlatformIO with shared cache.""" | ||||
|     env = os.environ.copy() | ||||
|     env["PLATFORMIO_CORE_DIR"] = str(cache_dir) | ||||
|     env["PLATFORMIO_CACHE_DIR"] = str(cache_dir / ".cache") | ||||
|     env["PLATFORMIO_LIBDEPS_DIR"] = str(cache_dir / "libdeps") | ||||
|     return env | ||||
|  | ||||
|  | ||||
| @pytest.fixture(scope="session") | ||||
| def shared_platformio_cache() -> Generator[Path]: | ||||
|     """Initialize a shared PlatformIO cache for all integration tests.""" | ||||
|     # Use a dedicated directory for integration tests to avoid conflicts | ||||
|     test_cache_dir = Path.home() / ".esphome-integration-tests" | ||||
|     cache_dir = test_cache_dir / "platformio" | ||||
|  | ||||
|     # Use a lock file in the home directory to ensure only one process initializes the cache | ||||
|     # This is needed when running with pytest-xdist | ||||
|     # The lock file must be in a directory that already exists to avoid race conditions | ||||
|     lock_file = Path.home() / ".esphome-integration-tests-init.lock" | ||||
|  | ||||
|     # Always acquire the lock to ensure cache is ready before proceeding | ||||
|     with open(lock_file, "w") as lock_fd: | ||||
|         fcntl.flock(lock_fd.fileno(), fcntl.LOCK_EX) | ||||
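|         # LOCK_EX without LOCK_NB blocks here until any other worker holding the lock releases it | ||||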
|  | ||||
|         # Check if cache needs initialization while holding the lock | ||||
|         if not cache_dir.exists() or not any(cache_dir.iterdir()): | ||||
|             # Create the test cache directory if it doesn't exist | ||||
|             test_cache_dir.mkdir(exist_ok=True) | ||||
|  | ||||
|             with tempfile.TemporaryDirectory() as tmpdir: | ||||
|                 # Create a basic host config | ||||
|                 init_dir = Path(tmpdir) | ||||
|                 config_path = init_dir / "cache_init.yaml" | ||||
|                 config_path.write_text("""esphome: | ||||
|   name: cache-init | ||||
| host: | ||||
| api: | ||||
|   encryption: | ||||
|     key: "IIevImVI42I0FGos5nLqFK91jrJehrgidI0ArwMLr8w=" | ||||
| logger: | ||||
| """) | ||||
|  | ||||
|                 # Run compilation to populate the cache | ||||
|                 # We must succeed here to avoid race conditions where multiple | ||||
|                 # tests try to populate the same cache directory simultaneously | ||||
|                 env = _get_platformio_env(cache_dir) | ||||
|  | ||||
|                 subprocess.run( | ||||
|                     ["esphome", "compile", str(config_path)], | ||||
|                     check=True, | ||||
|                     cwd=init_dir, | ||||
|                     env=env, | ||||
|                 ) | ||||
|  | ||||
|         # Lock is held until here, ensuring cache is fully populated before any test proceeds | ||||
|  | ||||
|     yield cache_dir | ||||
|  | ||||
|  | ||||
| @pytest.fixture(scope="module", autouse=True) | ||||
| def enable_aioesphomeapi_debug_logging(): | ||||
|     """Enable debug logging for aioesphomeapi to help diagnose connection issues.""" | ||||
| @@ -161,10 +223,15 @@ async def write_yaml_config( | ||||
| @pytest_asyncio.fixture | ||||
| async def compile_esphome( | ||||
|     integration_test_dir: Path, | ||||
|     shared_platformio_cache: Path, | ||||
| ) -> AsyncGenerator[CompileFunction]: | ||||
|     """Compile an ESPHome configuration and return the binary path.""" | ||||
|  | ||||
|     async def _compile(config_path: Path) -> Path: | ||||
|         # Use the shared PlatformIO cache for faster compilation | ||||
|         # This avoids re-downloading dependencies for each test | ||||
|         env = _get_platformio_env(shared_platformio_cache) | ||||
|  | ||||
|         # Retry compilation up to 3 times if we get a segfault | ||||
|         max_retries = 3 | ||||
|         for attempt in range(max_retries): | ||||
| @@ -179,6 +246,7 @@ async def compile_esphome( | ||||
|                 stdin=asyncio.subprocess.DEVNULL, | ||||
|                 # Start in a new process group to isolate signal handling | ||||
|                 start_new_session=True, | ||||
|                 env=env, | ||||
|             ) | ||||
|             await proc.wait() | ||||
|  | ||||
|   | ||||
| @@ -2,14 +2,10 @@ esphome: | ||||
|   name: api-conditional-memory-test | ||||
| host: | ||||
| api: | ||||
|   batch_delay: 0ms | ||||
|   actions: | ||||
|     - action: test_simple_service | ||||
|       then: | ||||
|         - logger.log: "Simple service called" | ||||
|         - binary_sensor.template.publish: | ||||
|             id: service_called_sensor | ||||
|             state: ON | ||||
|     - action: test_service_with_args | ||||
|       variables: | ||||
|         arg_string: string | ||||
| @@ -20,53 +16,14 @@ api: | ||||
|         - logger.log: | ||||
|             format: "Service called with: %s, %d, %d, %.2f" | ||||
|             args: [arg_string.c_str(), arg_int, arg_bool, arg_float] | ||||
|         - sensor.template.publish: | ||||
|             id: service_arg_sensor | ||||
|             state: !lambda 'return arg_float;' | ||||
|   on_client_connected: | ||||
|     - logger.log: | ||||
|         format: "Client %s connected from %s" | ||||
|         args: [client_info.c_str(), client_address.c_str()] | ||||
|     - binary_sensor.template.publish: | ||||
|         id: client_connected | ||||
|         state: ON | ||||
|     - text_sensor.template.publish: | ||||
|         id: last_client_info | ||||
|         state: !lambda 'return client_info;' | ||||
|   on_client_disconnected: | ||||
|     - logger.log: | ||||
|         format: "Client %s disconnected from %s" | ||||
|         args: [client_info.c_str(), client_address.c_str()] | ||||
|     - binary_sensor.template.publish: | ||||
|         id: client_connected | ||||
|         state: OFF | ||||
|     - binary_sensor.template.publish: | ||||
|         id: client_disconnected_event | ||||
|         state: ON | ||||
|  | ||||
| logger: | ||||
|   level: DEBUG | ||||
|  | ||||
| binary_sensor: | ||||
|   - platform: template | ||||
|     name: "Client Connected" | ||||
|     id: client_connected | ||||
|     device_class: connectivity | ||||
|   - platform: template | ||||
|     name: "Client Disconnected Event" | ||||
|     id: client_disconnected_event | ||||
|   - platform: template | ||||
|     name: "Service Called" | ||||
|     id: service_called_sensor | ||||
|  | ||||
| sensor: | ||||
|   - platform: template | ||||
|     name: "Service Argument Value" | ||||
|     id: service_arg_sensor | ||||
|     unit_of_measurement: "" | ||||
|     accuracy_decimals: 2 | ||||
|  | ||||
| text_sensor: | ||||
|   - platform: template | ||||
|     name: "Last Client Info" | ||||
|     id: last_client_info | ||||
|   | ||||
| @@ -23,19 +23,6 @@ void SchedulerStringLifetimeComponent::run_string_lifetime_test() { | ||||
|   test_vector_reallocation(); | ||||
|   test_string_move_semantics(); | ||||
|   test_lambda_capture_lifetime(); | ||||
|  | ||||
|   // Schedule final check | ||||
|   this->set_timeout("final_check", 200, [this]() { | ||||
|     ESP_LOGI(TAG, "String lifetime tests complete"); | ||||
|     ESP_LOGI(TAG, "Tests passed: %d", this->tests_passed_); | ||||
|     ESP_LOGI(TAG, "Tests failed: %d", this->tests_failed_); | ||||
|  | ||||
|     if (this->tests_failed_ == 0) { | ||||
|       ESP_LOGI(TAG, "SUCCESS: All string lifetime tests passed!"); | ||||
|     } else { | ||||
|       ESP_LOGE(TAG, "FAILURE: %d string lifetime tests failed!", this->tests_failed_); | ||||
|     } | ||||
|   }); | ||||
| } | ||||
|  | ||||
| void SchedulerStringLifetimeComponent::run_test1() { | ||||
| @@ -69,7 +56,6 @@ void SchedulerStringLifetimeComponent::run_test5() { | ||||
| } | ||||
|  | ||||
| void SchedulerStringLifetimeComponent::run_final_check() { | ||||
|   ESP_LOGI(TAG, "String lifetime tests complete"); | ||||
|   ESP_LOGI(TAG, "Tests passed: %d", this->tests_passed_); | ||||
|   ESP_LOGI(TAG, "Tests failed: %d", this->tests_failed_); | ||||
|  | ||||
| @@ -78,6 +64,7 @@ void SchedulerStringLifetimeComponent::run_final_check() { | ||||
|   } else { | ||||
|     ESP_LOGE(TAG, "FAILURE: %d string lifetime tests failed!", this->tests_failed_); | ||||
|   } | ||||
|   ESP_LOGI(TAG, "String lifetime tests complete"); | ||||
| } | ||||
|  | ||||
| void SchedulerStringLifetimeComponent::test_temporary_string_lifetime() { | ||||
|   | ||||
							
								
								
									
43  tests/integration/fixtures/scheduler_null_name.yaml  Normal file
							| @@ -0,0 +1,43 @@ | ||||
| esphome: | ||||
|   name: scheduler-null-name | ||||
|  | ||||
| host: | ||||
|  | ||||
| logger: | ||||
|   level: DEBUG | ||||
|  | ||||
| api: | ||||
|   services: | ||||
|     - service: test_null_name | ||||
|       then: | ||||
|         - lambda: |- | ||||
|             // First, create a scenario that would trigger the crash | ||||
|             // The crash happens when defer() is called with a name that would be cancelled | ||||
|  | ||||
|             // Test 1: Create a defer with a valid name | ||||
|             App.scheduler.set_timeout(nullptr, "test_defer", 0, []() { | ||||
|               ESP_LOGI("TEST", "First defer should be cancelled"); | ||||
|             }); | ||||
|  | ||||
|             // Test 2: Create another defer with the same name - this triggers cancel_item_locked_ | ||||
|             // In the unfixed code, this would crash if the name was NULL | ||||
|             App.scheduler.set_timeout(nullptr, "test_defer", 0, []() { | ||||
|               ESP_LOGI("TEST", "Second defer executed"); | ||||
|             }); | ||||
|  | ||||
|             // Test 3: Now test with nullptr - this is the actual crash scenario | ||||
|             // Create a defer item without a name (like voice assistant does) | ||||
|             const char* null_name = nullptr; | ||||
|             App.scheduler.set_timeout(nullptr, null_name, 0, []() { | ||||
|               ESP_LOGI("TEST", "Defer with null name executed"); | ||||
|             }); | ||||
|  | ||||
|             // Test 4: Create another defer with null name - this would trigger the crash | ||||
|             App.scheduler.set_timeout(nullptr, null_name, 0, []() { | ||||
|               ESP_LOGI("TEST", "Second null defer executed"); | ||||
|             }); | ||||
|  | ||||
|             // Test 5: Verify scheduler still works | ||||
|             App.scheduler.set_timeout(nullptr, "valid_timeout", 50, []() { | ||||
|               ESP_LOGI("TEST", "Test completed successfully"); | ||||
|             }); | ||||
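The integration test that drives this fixture is not part of this excerpt. A rough sketch of what it might look like, assuming harness fixtures named yaml_config, run_compiled and api_client_connected exist as shown (their names and signatures are assumptions) and using aioesphomeapi's list_entities_services()/execute_service()/device_info() calls:

    import asyncio

    import pytest


    @pytest.mark.asyncio
    async def test_scheduler_null_name(yaml_config, run_compiled, api_client_connected) -> None:
        """Sketch: trigger the NULL-name defers and confirm the device does not crash."""
        async with run_compiled(yaml_config), api_client_connected() as client:
            # Find the user-defined service exposed by the fixture above.
            _, services = await client.list_entities_services()
            svc = next(s for s in services if s.name == "test_null_name")
            client.execute_service(svc, {})  # may need `await` on older aioesphomeapi releases
            # Give the 0 ms defers and the 50 ms "valid_timeout" a chance to run.
            await asyncio.sleep(0.2)
            # If cancel_item_locked_ dereferenced the NULL name, the device would be gone by now.
            info = await client.device_info()
            assert info.name == "scheduler-null-name"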
Some files were not shown because too many files have changed in this diff.