diff --git a/.clang-tidy.hash b/.clang-tidy.hash index 2cd4319325..3ade00f0cd 100644 --- a/.clang-tidy.hash +++ b/.clang-tidy.hash @@ -1 +1 @@ -d7693a1e996cacd4a3d1c9a16336799c2a8cc3db02e4e74084151ce964581248 +3d46b63015d761c85ca9cb77ab79a389509e5776701fb22aed16e7b79d432c0c diff --git a/.coveragerc b/.coveragerc index f23592be24..c15e79a31b 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,4 +1,5 @@ [run] omit = esphome/components/* + esphome/analyze_memory/* tests/integration/* diff --git a/.github/workflows/auto-label-pr.yml b/.github/workflows/auto-label-pr.yml index 1670bd1821..4e2f086f47 100644 --- a/.github/workflows/auto-label-pr.yml +++ b/.github/workflows/auto-label-pr.yml @@ -53,6 +53,7 @@ jobs: 'new-target-platform', 'merging-to-release', 'merging-to-beta', + 'chained-pr', 'core', 'small-pr', 'dashboard', @@ -140,6 +141,8 @@ jobs: labels.add('merging-to-release'); } else if (baseRef === 'beta') { labels.add('merging-to-beta'); + } else if (baseRef !== 'dev') { + labels.add('chained-pr'); } return labels; @@ -528,8 +531,8 @@ jobs: const apiData = await fetchApiData(); const baseRef = context.payload.pull_request.base.ref; - // Early exit for non-dev branches - if (baseRef !== 'dev') { + // Early exit for release and beta branches only + if (baseRef === 'release' || baseRef === 'beta') { const branchLabels = await detectMergeBranch(); const finalLabels = Array.from(branchLabels); diff --git a/.github/workflows/ci-memory-impact-comment.yml b/.github/workflows/ci-memory-impact-comment.yml new file mode 100644 index 0000000000..eea1d2c148 --- /dev/null +++ b/.github/workflows/ci-memory-impact-comment.yml @@ -0,0 +1,111 @@ +--- +name: Memory Impact Comment (Forks) + +on: + workflow_run: + workflows: ["CI"] + types: [completed] + +permissions: + contents: read + pull-requests: write + actions: read + +jobs: + memory-impact-comment: + name: Post memory impact comment (fork PRs only) + runs-on: ubuntu-24.04 + # Only run for PRs from forks that had successful CI runs + if: > + github.event.workflow_run.event == 'pull_request' && + github.event.workflow_run.conclusion == 'success' && + github.event.workflow_run.head_repository.full_name != github.repository + env: + GH_TOKEN: ${{ github.token }} + steps: + - name: Get PR details + id: pr + run: | + # Get PR details by searching for PR with matching head SHA + # The workflow_run.pull_requests field is often empty for forks + # Use paginate to handle repos with many open PRs + head_sha="${{ github.event.workflow_run.head_sha }}" + pr_data=$(gh api --paginate "/repos/${{ github.repository }}/pulls" \ + --jq ".[] | select(.head.sha == \"$head_sha\") | {number: .number, base_ref: .base.ref}" \ + | head -n 1) + + if [ -z "$pr_data" ]; then + echo "No PR found for SHA $head_sha, skipping" + echo "skip=true" >> "$GITHUB_OUTPUT" + exit 0 + fi + + pr_number=$(echo "$pr_data" | jq -r '.number') + base_ref=$(echo "$pr_data" | jq -r '.base_ref') + + echo "pr_number=$pr_number" >> "$GITHUB_OUTPUT" + echo "base_ref=$base_ref" >> "$GITHUB_OUTPUT" + echo "Found PR #$pr_number targeting base branch: $base_ref" + + - name: Check out code from base repository + if: steps.pr.outputs.skip != 'true' + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + # Always check out from the base repository (esphome/esphome), never from forks + # Use the PR's target branch to ensure we run trusted code from the main repo + repository: ${{ github.repository }} + ref: ${{ steps.pr.outputs.base_ref }} + + - name: Restore Python + if: 
steps.pr.outputs.skip != 'true' + uses: ./.github/actions/restore-python + with: + python-version: "3.11" + cache-key: ${{ hashFiles('.cache-key') }} + + - name: Download memory analysis artifacts + if: steps.pr.outputs.skip != 'true' + run: | + run_id="${{ github.event.workflow_run.id }}" + echo "Downloading artifacts from workflow run $run_id" + + mkdir -p memory-analysis + + # Download target analysis artifact + if gh run download --name "memory-analysis-target" --dir memory-analysis --repo "${{ github.repository }}" "$run_id"; then + echo "Downloaded memory-analysis-target artifact." + else + echo "No memory-analysis-target artifact found." + fi + + # Download PR analysis artifact + if gh run download --name "memory-analysis-pr" --dir memory-analysis --repo "${{ github.repository }}" "$run_id"; then + echo "Downloaded memory-analysis-pr artifact." + else + echo "No memory-analysis-pr artifact found." + fi + + - name: Check if artifacts exist + id: check + if: steps.pr.outputs.skip != 'true' + run: | + if [ -f ./memory-analysis/memory-analysis-target.json ] && [ -f ./memory-analysis/memory-analysis-pr.json ]; then + echo "found=true" >> "$GITHUB_OUTPUT" + else + echo "found=false" >> "$GITHUB_OUTPUT" + echo "Memory analysis artifacts not found, skipping comment" + fi + + - name: Post or update PR comment + if: steps.pr.outputs.skip != 'true' && steps.check.outputs.found == 'true' + env: + PR_NUMBER: ${{ steps.pr.outputs.pr_number }} + run: | + . venv/bin/activate + # Pass PR number and JSON file paths directly to Python script + # Let Python parse the JSON to avoid shell injection risks + # The script will validate and sanitize all inputs + python script/ci_memory_impact_comment.py \ + --pr-number "$PR_NUMBER" \ + --target-json ./memory-analysis/memory-analysis-target.json \ + --pr-json ./memory-analysis/memory-analysis-pr.json diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 87e182fe4d..cb04f6bf8d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -170,11 +170,16 @@ jobs: outputs: integration-tests: ${{ steps.determine.outputs.integration-tests }} clang-tidy: ${{ steps.determine.outputs.clang-tidy }} + clang-tidy-mode: ${{ steps.determine.outputs.clang-tidy-mode }} python-linters: ${{ steps.determine.outputs.python-linters }} changed-components: ${{ steps.determine.outputs.changed-components }} changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }} directly-changed-components-with-tests: ${{ steps.determine.outputs.directly-changed-components-with-tests }} component-test-count: ${{ steps.determine.outputs.component-test-count }} + changed-cpp-file-count: ${{ steps.determine.outputs.changed-cpp-file-count }} + memory_impact: ${{ steps.determine.outputs.memory-impact }} + cpp-unit-tests-run-all: ${{ steps.determine.outputs.cpp-unit-tests-run-all }} + cpp-unit-tests-components: ${{ steps.determine.outputs.cpp-unit-tests-components }} steps: - name: Check out code from GitHub uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 @@ -199,11 +204,16 @@ jobs: # Extract individual fields echo "integration-tests=$(echo "$output" | jq -r '.integration_tests')" >> $GITHUB_OUTPUT echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT + echo "clang-tidy-mode=$(echo "$output" | jq -r '.clang_tidy_mode')" >> $GITHUB_OUTPUT echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> 
$GITHUB_OUTPUT echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT echo "directly-changed-components-with-tests=$(echo "$output" | jq -c '.directly_changed_components_with_tests')" >> $GITHUB_OUTPUT echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT + echo "changed-cpp-file-count=$(echo "$output" | jq -r '.changed_cpp_file_count')" >> $GITHUB_OUTPUT + echo "memory-impact=$(echo "$output" | jq -c '.memory_impact')" >> $GITHUB_OUTPUT + echo "cpp-unit-tests-run-all=$(echo "$output" | jq -r '.cpp_unit_tests_run_all')" >> $GITHUB_OUTPUT + echo "cpp-unit-tests-components=$(echo "$output" | jq -c '.cpp_unit_tests_components')" >> $GITHUB_OUTPUT integration-tests: name: Run integration tests @@ -241,7 +251,34 @@ jobs: . venv/bin/activate pytest -vv --no-cov --tb=native -n auto tests/integration/ - clang-tidy: + cpp-unit-tests: + name: Run C++ unit tests + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + if: github.event_name == 'pull_request' && (needs.determine-jobs.outputs.cpp-unit-tests-run-all == 'true' || needs.determine-jobs.outputs.cpp-unit-tests-components != '[]') + steps: + - name: Check out code from GitHub + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + + - name: Restore Python + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + + - name: Run cpp_unit_test.py + run: | + . venv/bin/activate + if [ "${{ needs.determine-jobs.outputs.cpp-unit-tests-run-all }}" = "true" ]; then + script/cpp_unit_test.py --all + else + ARGS=$(echo '${{ needs.determine-jobs.outputs.cpp-unit-tests-components }}' | jq -r '.[] | @sh' | xargs) + script/cpp_unit_test.py $ARGS + fi + + clang-tidy-single: name: ${{ matrix.name }} runs-on: ubuntu-24.04 needs: @@ -259,22 +296,6 @@ jobs: name: Run script/clang-tidy for ESP8266 options: --environment esp8266-arduino-tidy --grep USE_ESP8266 pio_cache_key: tidyesp8266 - - id: clang-tidy - name: Run script/clang-tidy for ESP32 Arduino 1/4 - options: --environment esp32-arduino-tidy --split-num 4 --split-at 1 - pio_cache_key: tidyesp32 - - id: clang-tidy - name: Run script/clang-tidy for ESP32 Arduino 2/4 - options: --environment esp32-arduino-tidy --split-num 4 --split-at 2 - pio_cache_key: tidyesp32 - - id: clang-tidy - name: Run script/clang-tidy for ESP32 Arduino 3/4 - options: --environment esp32-arduino-tidy --split-num 4 --split-at 3 - pio_cache_key: tidyesp32 - - id: clang-tidy - name: Run script/clang-tidy for ESP32 Arduino 4/4 - options: --environment esp32-arduino-tidy --split-num 4 --split-at 4 - pio_cache_key: tidyesp32 - id: clang-tidy name: Run script/clang-tidy for ESP32 IDF options: --environment esp32-idf-tidy --grep USE_ESP_IDF @@ -355,6 +376,166 @@ jobs: # yamllint disable-line rule:line-length if: always() + clang-tidy-nosplit: + name: Run script/clang-tidy for ESP32 Arduino + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + if: needs.determine-jobs.outputs.clang-tidy-mode == 'nosplit' + env: + GH_TOKEN: ${{ github.token }} + steps: + - name: Check out code from GitHub + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + # Need history for HEAD~1 to work for checking changed files + fetch-depth: 2 + + - name: Restore Python + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + + 
- name: Cache platformio + if: github.ref == 'refs/heads/dev' + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }} + + - name: Cache platformio + if: github.ref != 'refs/heads/dev' + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }} + + - name: Register problem matchers + run: | + echo "::add-matcher::.github/workflows/matchers/gcc.json" + echo "::add-matcher::.github/workflows/matchers/clang-tidy.json" + + - name: Check if full clang-tidy scan needed + id: check_full_scan + run: | + . venv/bin/activate + if python script/clang_tidy_hash.py --check; then + echo "full_scan=true" >> $GITHUB_OUTPUT + echo "reason=hash_changed" >> $GITHUB_OUTPUT + else + echo "full_scan=false" >> $GITHUB_OUTPUT + echo "reason=normal" >> $GITHUB_OUTPUT + fi + + - name: Run clang-tidy + run: | + . venv/bin/activate + if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then + echo "Running FULL clang-tidy scan (hash changed)" + script/clang-tidy --all-headers --fix --environment esp32-arduino-tidy + else + echo "Running clang-tidy on changed files only" + script/clang-tidy --all-headers --fix --changed --environment esp32-arduino-tidy + fi + env: + # Also cache libdeps, store them in a ~/.platformio subfolder + PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps + + - name: Suggested changes + run: script/ci-suggest-changes + if: always() + + clang-tidy-split: + name: ${{ matrix.name }} + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + if: needs.determine-jobs.outputs.clang-tidy-mode == 'split' + env: + GH_TOKEN: ${{ github.token }} + strategy: + fail-fast: false + max-parallel: 1 + matrix: + include: + - id: clang-tidy + name: Run script/clang-tidy for ESP32 Arduino 1/4 + options: --environment esp32-arduino-tidy --split-num 4 --split-at 1 + - id: clang-tidy + name: Run script/clang-tidy for ESP32 Arduino 2/4 + options: --environment esp32-arduino-tidy --split-num 4 --split-at 2 + - id: clang-tidy + name: Run script/clang-tidy for ESP32 Arduino 3/4 + options: --environment esp32-arduino-tidy --split-num 4 --split-at 3 + - id: clang-tidy + name: Run script/clang-tidy for ESP32 Arduino 4/4 + options: --environment esp32-arduino-tidy --split-num 4 --split-at 4 + + steps: + - name: Check out code from GitHub + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + # Need history for HEAD~1 to work for checking changed files + fetch-depth: 2 + + - name: Restore Python + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + + - name: Cache platformio + if: github.ref == 'refs/heads/dev' + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }} + + - name: Cache platformio + if: github.ref != 'refs/heads/dev' + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }} + + - name: Register problem matchers + run: | + echo "::add-matcher::.github/workflows/matchers/gcc.json" + echo "::add-matcher::.github/workflows/matchers/clang-tidy.json" + + - name: Check if full clang-tidy scan needed + id: check_full_scan + run: | + . 
venv/bin/activate + if python script/clang_tidy_hash.py --check; then + echo "full_scan=true" >> $GITHUB_OUTPUT + echo "reason=hash_changed" >> $GITHUB_OUTPUT + else + echo "full_scan=false" >> $GITHUB_OUTPUT + echo "reason=normal" >> $GITHUB_OUTPUT + fi + + - name: Run clang-tidy + run: | + . venv/bin/activate + if [ "${{ steps.check_full_scan.outputs.full_scan }}" = "true" ]; then + echo "Running FULL clang-tidy scan (hash changed)" + script/clang-tidy --all-headers --fix ${{ matrix.options }} + else + echo "Running clang-tidy on changed files only" + script/clang-tidy --all-headers --fix --changed ${{ matrix.options }} + fi + env: + # Also cache libdeps, store them in a ~/.platformio subfolder + PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps + + - name: Suggested changes + run: script/ci-suggest-changes + if: always() + test-build-components-splitter: name: Split components for intelligent grouping (40 weighted per batch) runs-on: ubuntu-24.04 @@ -521,6 +702,271 @@ jobs: - uses: pre-commit-ci/lite-action@5d6cc0eb514c891a40562a58a8e71576c5c7fb43 # v1.1.0 if: always() + memory-impact-target-branch: + name: Build target branch for memory impact + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' + outputs: + ram_usage: ${{ steps.extract.outputs.ram_usage }} + flash_usage: ${{ steps.extract.outputs.flash_usage }} + cache_hit: ${{ steps.cache-memory-analysis.outputs.cache-hit }} + skip: ${{ steps.check-script.outputs.skip }} + steps: + - name: Check out target branch + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + ref: ${{ github.base_ref }} + + # Check if memory impact extraction script exists on target branch + # If not, skip the analysis (this handles older branches that don't have the feature) + - name: Check for memory impact script + id: check-script + run: | + if [ -f "script/ci_memory_impact_extract.py" ]; then + echo "skip=false" >> $GITHUB_OUTPUT + else + echo "skip=true" >> $GITHUB_OUTPUT + echo "::warning::ci_memory_impact_extract.py not found on target branch, skipping memory impact analysis" + fi + + # All remaining steps only run if script exists + - name: Generate cache key + id: cache-key + if: steps.check-script.outputs.skip != 'true' + run: | + # Get the commit SHA of the target branch + target_sha=$(git rev-parse HEAD) + + # Hash the build infrastructure files (all files that affect build/analysis) + infra_hash=$(cat \ + script/test_build_components.py \ + script/ci_memory_impact_extract.py \ + script/analyze_component_buses.py \ + script/merge_component_configs.py \ + script/ci_helpers.py \ + .github/workflows/ci.yml \ + | sha256sum | cut -d' ' -f1) + + # Get platform and components from job inputs + platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}" + components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}' + components_hash=$(echo "$components" | sha256sum | cut -d' ' -f1) + + # Combine into cache key + cache_key="memory-analysis-target-${target_sha}-${infra_hash}-${platform}-${components_hash}" + echo "cache-key=${cache_key}" >> $GITHUB_OUTPUT + echo "Cache key: ${cache_key}" + + - name: Restore cached memory analysis + id: cache-memory-analysis + if: steps.check-script.outputs.skip != 'true' + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: memory-analysis-target.json + key: ${{ 
steps.cache-key.outputs.cache-key }} + + - name: Cache status + if: steps.check-script.outputs.skip != 'true' + run: | + if [ "${{ steps.cache-memory-analysis.outputs.cache-hit }}" == "true" ]; then + echo "✓ Cache hit! Using cached memory analysis results." + echo " Skipping build step to save time." + else + echo "✗ Cache miss. Will build and analyze memory usage." + fi + + - name: Restore Python + if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + + - name: Cache platformio + if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }} + + - name: Build, compile, and analyze memory + if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' + id: build + run: | + . venv/bin/activate + components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}' + platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}" + + echo "Building with test_build_components.py for $platform with components:" + echo "$components" | jq -r '.[]' | sed 's/^/ - /' + + # Use test_build_components.py which handles grouping automatically + # Pass components as comma-separated list + component_list=$(echo "$components" | jq -r 'join(",")') + + echo "Compiling with test_build_components.py..." + + # Run build and extract memory with auto-detection of build directory for detailed analysis + # Use tee to show output in CI while also piping to extraction script + python script/test_build_components.py \ + -e compile \ + -c "$component_list" \ + -t "$platform" 2>&1 | \ + tee /dev/stderr | \ + python script/ci_memory_impact_extract.py \ + --output-env \ + --output-json memory-analysis-target.json + + # Add metadata to JSON before caching + python script/ci_add_metadata_to_json.py \ + --json-file memory-analysis-target.json \ + --components "$components" \ + --platform "$platform" + + - name: Save memory analysis to cache + if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success' + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: memory-analysis-target.json + key: ${{ steps.cache-key.outputs.cache-key }} + + - name: Extract memory usage for outputs + id: extract + if: steps.check-script.outputs.skip != 'true' + run: | + if [ -f memory-analysis-target.json ]; then + ram=$(jq -r '.ram_bytes' memory-analysis-target.json) + flash=$(jq -r '.flash_bytes' memory-analysis-target.json) + echo "ram_usage=${ram}" >> $GITHUB_OUTPUT + echo "flash_usage=${flash}" >> $GITHUB_OUTPUT + echo "RAM: ${ram} bytes, Flash: ${flash} bytes" + else + echo "Error: memory-analysis-target.json not found" + exit 1 + fi + + - name: Upload memory analysis JSON + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: memory-analysis-target + path: memory-analysis-target.json + if-no-files-found: warn + retention-days: 1 + + memory-impact-pr-branch: + name: Build PR branch for memory impact + runs-on: ubuntu-24.04 + needs: + 
- common + - determine-jobs + if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' + outputs: + ram_usage: ${{ steps.extract.outputs.ram_usage }} + flash_usage: ${{ steps.extract.outputs.flash_usage }} + steps: + - name: Check out PR branch + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - name: Restore Python + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + - name: Cache platformio + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.platformio + key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }} + - name: Build, compile, and analyze memory + id: extract + run: | + . venv/bin/activate + components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}' + platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}" + + echo "Building with test_build_components.py for $platform with components:" + echo "$components" | jq -r '.[]' | sed 's/^/ - /' + + # Use test_build_components.py which handles grouping automatically + # Pass components as comma-separated list + component_list=$(echo "$components" | jq -r 'join(",")') + + echo "Compiling with test_build_components.py..." + + # Run build and extract memory with auto-detection of build directory for detailed analysis + # Use tee to show output in CI while also piping to extraction script + python script/test_build_components.py \ + -e compile \ + -c "$component_list" \ + -t "$platform" 2>&1 | \ + tee /dev/stderr | \ + python script/ci_memory_impact_extract.py \ + --output-env \ + --output-json memory-analysis-pr.json + + # Add metadata to JSON (components and platform are in shell variables above) + python script/ci_add_metadata_to_json.py \ + --json-file memory-analysis-pr.json \ + --components "$components" \ + --platform "$platform" + + - name: Upload memory analysis JSON + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: memory-analysis-pr + path: memory-analysis-pr.json + if-no-files-found: warn + retention-days: 1 + + memory-impact-comment: + name: Comment memory impact + runs-on: ubuntu-24.04 + needs: + - common + - determine-jobs + - memory-impact-target-branch + - memory-impact-pr-branch + if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true' + permissions: + contents: read + pull-requests: write + env: + GH_TOKEN: ${{ github.token }} + steps: + - name: Check out code + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - name: Restore Python + uses: ./.github/actions/restore-python + with: + python-version: ${{ env.DEFAULT_PYTHON }} + cache-key: ${{ needs.common.outputs.cache-key }} + - name: Download target analysis JSON + uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 + with: + name: memory-analysis-target + path: ./memory-analysis + continue-on-error: true + - name: Download PR analysis JSON + uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 + with: + name: memory-analysis-pr + path: ./memory-analysis + continue-on-error: true + - name: Post or update PR 
comment + env: + PR_NUMBER: ${{ github.event.pull_request.number }} + run: | + . venv/bin/activate + + # Pass JSON file paths directly to Python script + # All data is extracted from JSON files for security + python script/ci_memory_impact_comment.py \ + --pr-number "$PR_NUMBER" \ + --target-json ./memory-analysis/memory-analysis-target.json \ + --pr-json ./memory-analysis/memory-analysis-pr.json + ci-status: name: CI Status runs-on: ubuntu-24.04 @@ -530,11 +976,16 @@ jobs: - pylint - pytest - integration-tests - - clang-tidy + - clang-tidy-single + - clang-tidy-nosplit + - clang-tidy-split - determine-jobs - test-build-components-splitter - test-build-components-split - pre-commit-ci-lite + - memory-impact-target-branch + - memory-impact-pr-branch + - memory-impact-comment if: always() steps: - name: Success diff --git a/.github/workflows/status-check-labels.yml b/.github/workflows/status-check-labels.yml index e44fd18132..cca70815b9 100644 --- a/.github/workflows/status-check-labels.yml +++ b/.github/workflows/status-check-labels.yml @@ -14,6 +14,7 @@ jobs: label: - needs-docs - merge-after-release + - chained-pr steps: - name: Check for ${{ matrix.label }} label uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 diff --git a/CODEOWNERS b/CODEOWNERS index b5cefa1e0c..4f860375d9 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -70,6 +70,7 @@ esphome/components/bl0939/* @ziceva esphome/components/bl0940/* @dan-s-github @tobias- esphome/components/bl0942/* @dbuezas @dwmw2 esphome/components/ble_client/* @buxtronix @clydebarrow +esphome/components/ble_nus/* @tomaszduda23 esphome/components/bluetooth_proxy/* @bdraco @jesserockz esphome/components/bme280_base/* @esphome/core esphome/components/bme280_spi/* @apbodrov @@ -160,6 +161,7 @@ esphome/components/esp32_rmt_led_strip/* @jesserockz esphome/components/esp8266/* @esphome/core esphome/components/esp_ldo/* @clydebarrow esphome/components/espnow/* @jesserockz +esphome/components/espnow/packet_transport/* @EasilyBoredEngineer esphome/components/ethernet_info/* @gtjadsonsantos esphome/components/event/* @nohat esphome/components/exposure_notifications/* @OttoWinter diff --git a/esphome/__main__.py b/esphome/__main__.py index 079dd4753a..b110d3167f 100644 --- a/esphome/__main__.py +++ b/esphome/__main__.py @@ -62,6 +62,40 @@ from esphome.util import ( _LOGGER = logging.getLogger(__name__) +# Special non-component keys that appear in configs +_NON_COMPONENT_KEYS = frozenset( + { + CONF_ESPHOME, + "substitutions", + "packages", + "globals", + "external_components", + "<<", + } +) + + +def detect_external_components(config: ConfigType) -> set[str]: + """Detect external/custom components in the configuration. + + External components are those that appear in the config but are not + part of ESPHome's built-in components and are not special config keys. 
+ + Args: + config: The ESPHome configuration dictionary + + Returns: + A set of external component names + """ + from esphome.analyze_memory.helpers import get_esphome_components + + builtin_components = get_esphome_components() + return { + key + for key in config + if key not in builtin_components and key not in _NON_COMPONENT_KEYS + } + class ArgsProtocol(Protocol): device: list[str] | None @@ -468,7 +502,9 @@ def write_cpp_file() -> int: def compile_program(args: ArgsProtocol, config: ConfigType) -> int: from esphome import platformio_api - _LOGGER.info("Compiling app...") + # NOTE: "Build path:" format is parsed by script/ci_memory_impact_extract.py + # If you change this format, update the regex in that script as well + _LOGGER.info("Compiling app... Build path: %s", CORE.build_path) rc = platformio_api.run_compile(config, CORE.verbose) if rc != 0: return rc @@ -897,6 +933,54 @@ def command_idedata(args: ArgsProtocol, config: ConfigType) -> int: return 0 +def command_analyze_memory(args: ArgsProtocol, config: ConfigType) -> int: + """Analyze memory usage by component. + + This command compiles the configuration and performs memory analysis. + Compilation is fast if sources haven't changed (just relinking). + """ + from esphome import platformio_api + from esphome.analyze_memory.cli import MemoryAnalyzerCLI + + # Always compile to ensure fresh data (fast if no changes - just relinks) + exit_code = write_cpp(config) + if exit_code != 0: + return exit_code + exit_code = compile_program(args, config) + if exit_code != 0: + return exit_code + _LOGGER.info("Successfully compiled program.") + + # Get idedata for analysis + idedata = platformio_api.get_idedata(config) + if idedata is None: + _LOGGER.error("Failed to get IDE data for memory analysis") + return 1 + + firmware_elf = Path(idedata.firmware_elf_path) + + # Extract external components from config + external_components = detect_external_components(config) + _LOGGER.debug("Detected external components: %s", external_components) + + # Perform memory analysis + _LOGGER.info("Analyzing memory usage...") + analyzer = MemoryAnalyzerCLI( + str(firmware_elf), + idedata.objdump_path, + idedata.readelf_path, + external_components, + ) + analyzer.analyze() + + # Generate and display report + report = analyzer.generate_report() + print() + print(report) + + return 0 + + def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None: new_name = args.name for c in new_name: @@ -1012,6 +1096,7 @@ POST_CONFIG_ACTIONS = { "idedata": command_idedata, "rename": command_rename, "discover": command_discover, + "analyze-memory": command_analyze_memory, } SIMPLE_CONFIG_ACTIONS = [ @@ -1308,6 +1393,14 @@ def parse_args(argv): ) parser_rename.add_argument("name", help="The new name for the device.", type=str) + parser_analyze_memory = subparsers.add_parser( + "analyze-memory", + help="Analyze memory usage by component.", + ) + parser_analyze_memory.add_argument( + "configuration", help="Your YAML configuration file(s).", nargs="+" + ) + # Keep backward compatibility with the old command line format of # esphome . 
# diff --git a/esphome/analyze_memory/__init__.py b/esphome/analyze_memory/__init__.py new file mode 100644 index 0000000000..71e86e3788 --- /dev/null +++ b/esphome/analyze_memory/__init__.py @@ -0,0 +1,502 @@ +"""Memory usage analyzer for ESPHome compiled binaries.""" + +from collections import defaultdict +from dataclasses import dataclass, field +import logging +from pathlib import Path +import re +import subprocess +from typing import TYPE_CHECKING + +from .const import ( + CORE_SUBCATEGORY_PATTERNS, + DEMANGLED_PATTERNS, + ESPHOME_COMPONENT_PATTERN, + SECTION_TO_ATTR, + SYMBOL_PATTERNS, +) +from .helpers import ( + get_component_class_patterns, + get_esphome_components, + map_section_name, + parse_symbol_line, +) + +if TYPE_CHECKING: + from esphome.platformio_api import IDEData + +_LOGGER = logging.getLogger(__name__) + +# GCC global constructor/destructor prefix annotations +_GCC_PREFIX_ANNOTATIONS = { + "_GLOBAL__sub_I_": "global constructor for", + "_GLOBAL__sub_D_": "global destructor for", +} + +# GCC optimization suffix pattern (e.g., $isra$0, $part$1, $constprop$2) +_GCC_OPTIMIZATION_SUFFIX_PATTERN = re.compile(r"(\$(?:isra|part|constprop)\$\d+)") + +# C++ runtime patterns for categorization +_CPP_RUNTIME_PATTERNS = frozenset(["vtable", "typeinfo", "thunk"]) + +# libc printf/scanf family base names (used to detect variants like _printf_r, vfprintf, etc.) +_LIBC_PRINTF_SCANF_FAMILY = frozenset(["printf", "fprintf", "sprintf", "scanf"]) + +# Regex pattern for parsing readelf section headers +# Format: [ #] name type addr off size +_READELF_SECTION_PATTERN = re.compile( + r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)" +) + +# Component category prefixes +_COMPONENT_PREFIX_ESPHOME = "[esphome]" +_COMPONENT_PREFIX_EXTERNAL = "[external]" +_COMPONENT_CORE = f"{_COMPONENT_PREFIX_ESPHOME}core" +_COMPONENT_API = f"{_COMPONENT_PREFIX_ESPHOME}api" + +# C++ namespace prefixes +_NAMESPACE_ESPHOME = "esphome::" +_NAMESPACE_STD = "std::" + +# Type alias for symbol information: (symbol_name, size, component) +SymbolInfoType = tuple[str, int, str] + + +@dataclass +class MemorySection: + """Represents a memory section with its symbols.""" + + name: str + symbols: list[SymbolInfoType] = field(default_factory=list) + total_size: int = 0 + + +@dataclass +class ComponentMemory: + """Tracks memory usage for a component.""" + + name: str + text_size: int = 0 # Code in flash + rodata_size: int = 0 # Read-only data in flash + data_size: int = 0 # Initialized data (flash + ram) + bss_size: int = 0 # Uninitialized data (ram only) + symbol_count: int = 0 + + @property + def flash_total(self) -> int: + """Total flash usage (text + rodata + data).""" + return self.text_size + self.rodata_size + self.data_size + + @property + def ram_total(self) -> int: + """Total RAM usage (data + bss).""" + return self.data_size + self.bss_size + + +class MemoryAnalyzer: + """Analyzes memory usage from ELF files.""" + + def __init__( + self, + elf_path: str, + objdump_path: str | None = None, + readelf_path: str | None = None, + external_components: set[str] | None = None, + idedata: "IDEData | None" = None, + ) -> None: + """Initialize memory analyzer. 
+ + Args: + elf_path: Path to ELF file to analyze + objdump_path: Path to objdump binary (auto-detected from idedata if not provided) + readelf_path: Path to readelf binary (auto-detected from idedata if not provided) + external_components: Set of external component names + idedata: Optional PlatformIO IDEData object to auto-detect toolchain paths + """ + self.elf_path = Path(elf_path) + if not self.elf_path.exists(): + raise FileNotFoundError(f"ELF file not found: {elf_path}") + + # Auto-detect toolchain paths from idedata if not provided + if idedata is not None and (objdump_path is None or readelf_path is None): + objdump_path = objdump_path or idedata.objdump_path + readelf_path = readelf_path or idedata.readelf_path + _LOGGER.debug("Using toolchain paths from PlatformIO idedata") + + self.objdump_path = objdump_path or "objdump" + self.readelf_path = readelf_path or "readelf" + self.external_components = external_components or set() + + self.sections: dict[str, MemorySection] = {} + self.components: dict[str, ComponentMemory] = defaultdict( + lambda: ComponentMemory("") + ) + self._demangle_cache: dict[str, str] = {} + self._uncategorized_symbols: list[tuple[str, str, int]] = [] + self._esphome_core_symbols: list[ + tuple[str, str, int] + ] = [] # Track core symbols + self._component_symbols: dict[str, list[tuple[str, str, int]]] = defaultdict( + list + ) # Track symbols for all components + + def analyze(self) -> dict[str, ComponentMemory]: + """Analyze the ELF file and return component memory usage.""" + self._parse_sections() + self._parse_symbols() + self._categorize_symbols() + return dict(self.components) + + def _parse_sections(self) -> None: + """Parse section headers from ELF file.""" + result = subprocess.run( + [self.readelf_path, "-S", str(self.elf_path)], + capture_output=True, + text=True, + check=True, + ) + + # Parse section headers + for line in result.stdout.splitlines(): + # Look for section entries + if not (match := _READELF_SECTION_PATTERN.match(line)): + continue + + section_name = match.group(1) + size_hex = match.group(2) + size = int(size_hex, 16) + + # Map to standard section name + mapped_section = map_section_name(section_name) + if not mapped_section: + continue + + if mapped_section not in self.sections: + self.sections[mapped_section] = MemorySection(mapped_section) + self.sections[mapped_section].total_size += size + + def _parse_symbols(self) -> None: + """Parse symbols from ELF file.""" + result = subprocess.run( + [self.objdump_path, "-t", str(self.elf_path)], + capture_output=True, + text=True, + check=True, + ) + + # Track seen addresses to avoid duplicates + seen_addresses: set[str] = set() + + for line in result.stdout.splitlines(): + if not (symbol_info := parse_symbol_line(line)): + continue + + section, name, size, address = symbol_info + + # Skip duplicate symbols at the same address (e.g., C1/C2 constructors) + if address in seen_addresses or section not in self.sections: + continue + + self.sections[section].symbols.append((name, size, "")) + seen_addresses.add(address) + + def _categorize_symbols(self) -> None: + """Categorize symbols by component.""" + # First, collect all unique symbol names for batch demangling + all_symbols = { + symbol_name + for section in self.sections.values() + for symbol_name, _, _ in section.symbols + } + + # Batch demangle all symbols at once + self._batch_demangle_symbols(list(all_symbols)) + + # Now categorize with cached demangled names + for section_name, section in self.sections.items(): + for 
symbol_name, size, _ in section.symbols: + component = self._identify_component(symbol_name) + + if component not in self.components: + self.components[component] = ComponentMemory(component) + + comp_mem = self.components[component] + comp_mem.symbol_count += 1 + + # Update the appropriate size attribute based on section + if attr_name := SECTION_TO_ATTR.get(section_name): + setattr(comp_mem, attr_name, getattr(comp_mem, attr_name) + size) + + # Track uncategorized symbols + if component == "other" and size > 0: + demangled = self._demangle_symbol(symbol_name) + self._uncategorized_symbols.append((symbol_name, demangled, size)) + + # Track ESPHome core symbols for detailed analysis + if component == _COMPONENT_CORE and size > 0: + demangled = self._demangle_symbol(symbol_name) + self._esphome_core_symbols.append((symbol_name, demangled, size)) + + # Track all component symbols for detailed analysis + if size > 0: + demangled = self._demangle_symbol(symbol_name) + self._component_symbols[component].append( + (symbol_name, demangled, size) + ) + + def _identify_component(self, symbol_name: str) -> str: + """Identify which component a symbol belongs to.""" + # Demangle C++ names if needed + demangled = self._demangle_symbol(symbol_name) + + # Check for special component classes first (before namespace pattern) + # This handles cases like esphome::ESPHomeOTAComponent which should map to ota + if _NAMESPACE_ESPHOME in demangled: + # Check for special component classes that include component name in the class + # For example: esphome::ESPHomeOTAComponent -> ota component + for component_name in get_esphome_components(): + patterns = get_component_class_patterns(component_name) + if any(pattern in demangled for pattern in patterns): + return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}" + + # Check for ESPHome component namespaces + match = ESPHOME_COMPONENT_PATTERN.search(demangled) + if match: + component_name = match.group(1) + # Strip trailing underscore if present (e.g., switch_ -> switch) + component_name = component_name.rstrip("_") + + # Check if this is an actual component in the components directory + if component_name in get_esphome_components(): + return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}" + # Check if this is a known external component from the config + if component_name in self.external_components: + return f"{_COMPONENT_PREFIX_EXTERNAL}{component_name}" + # Everything else in esphome:: namespace is core + return _COMPONENT_CORE + + # Check for esphome core namespace (no component namespace) + if _NAMESPACE_ESPHOME in demangled: + # If no component match found, it's core + return _COMPONENT_CORE + + # Check against symbol patterns + for component, patterns in SYMBOL_PATTERNS.items(): + if any(pattern in symbol_name for pattern in patterns): + return component + + # Check against demangled patterns + for component, patterns in DEMANGLED_PATTERNS.items(): + if any(pattern in demangled for pattern in patterns): + return component + + # Special cases that need more complex logic + + # Check if spi_flash vs spi_driver + if "spi_" in symbol_name or "SPI" in symbol_name: + return "spi_flash" if "spi_flash" in symbol_name else "spi_driver" + + # libc special printf variants + if ( + symbol_name.startswith("_") + and symbol_name[1:].replace("_r", "").replace("v", "").replace("s", "") + in _LIBC_PRINTF_SCANF_FAMILY + ): + return "libc" + + # Track uncategorized symbols for analysis + return "other" + + def _batch_demangle_symbols(self, symbols: list[str]) -> None: + """Batch 
demangle C++ symbol names for efficiency.""" + if not symbols: + return + + # Try to find the appropriate c++filt for the platform + cppfilt_cmd = "c++filt" + + _LOGGER.info("Demangling %d symbols", len(symbols)) + _LOGGER.debug("objdump_path = %s", self.objdump_path) + + # Check if we have a toolchain-specific c++filt + if self.objdump_path and self.objdump_path != "objdump": + # Replace objdump with c++filt in the path + potential_cppfilt = self.objdump_path.replace("objdump", "c++filt") + _LOGGER.info("Checking for toolchain c++filt at: %s", potential_cppfilt) + if Path(potential_cppfilt).exists(): + cppfilt_cmd = potential_cppfilt + _LOGGER.info("✓ Using toolchain c++filt: %s", cppfilt_cmd) + else: + _LOGGER.info( + "✗ Toolchain c++filt not found at %s, using system c++filt", + potential_cppfilt, + ) + else: + _LOGGER.info("✗ Using system c++filt (objdump_path=%s)", self.objdump_path) + + # Strip GCC optimization suffixes and prefixes before demangling + # Suffixes like $isra$0, $part$0, $constprop$0 confuse c++filt + # Prefixes like _GLOBAL__sub_I_ need to be removed and tracked + symbols_stripped: list[str] = [] + symbols_prefixes: list[str] = [] # Track removed prefixes + for symbol in symbols: + # Remove GCC optimization markers + stripped = _GCC_OPTIMIZATION_SUFFIX_PATTERN.sub("", symbol) + + # Handle GCC global constructor/initializer prefixes + # _GLOBAL__sub_I_ -> extract for demangling + prefix = "" + for gcc_prefix in _GCC_PREFIX_ANNOTATIONS: + if stripped.startswith(gcc_prefix): + prefix = gcc_prefix + stripped = stripped[len(prefix) :] + break + + symbols_stripped.append(stripped) + symbols_prefixes.append(prefix) + + try: + # Send all symbols to c++filt at once + result = subprocess.run( + [cppfilt_cmd], + input="\n".join(symbols_stripped), + capture_output=True, + text=True, + check=False, + ) + except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e: + # On error, cache originals + _LOGGER.warning("Failed to batch demangle symbols: %s", e) + for symbol in symbols: + self._demangle_cache[symbol] = symbol + return + + if result.returncode != 0: + _LOGGER.warning( + "c++filt exited with code %d: %s", + result.returncode, + result.stderr[:200] if result.stderr else "(no error output)", + ) + # Cache originals on failure + for symbol in symbols: + self._demangle_cache[symbol] = symbol + return + + # Process demangled output + self._process_demangled_output( + symbols, symbols_stripped, symbols_prefixes, result.stdout, cppfilt_cmd + ) + + def _process_demangled_output( + self, + symbols: list[str], + symbols_stripped: list[str], + symbols_prefixes: list[str], + demangled_output: str, + cppfilt_cmd: str, + ) -> None: + """Process demangled symbol output and populate cache. 
+ + Args: + symbols: Original symbol names + symbols_stripped: Stripped symbol names sent to c++filt + symbols_prefixes: Removed prefixes to restore + demangled_output: Output from c++filt + cppfilt_cmd: Path to c++filt command (for logging) + """ + demangled_lines = demangled_output.strip().split("\n") + failed_count = 0 + + for original, stripped, prefix, demangled in zip( + symbols, symbols_stripped, symbols_prefixes, demangled_lines + ): + # Add back any prefix that was removed + demangled = self._restore_symbol_prefix(prefix, stripped, demangled) + + # If we stripped a suffix, add it back to the demangled name for clarity + if original != stripped and not prefix: + demangled = self._restore_symbol_suffix(original, demangled) + + self._demangle_cache[original] = demangled + + # Log symbols that failed to demangle (stayed the same as stripped version) + if stripped == demangled and stripped.startswith("_Z"): + failed_count += 1 + if failed_count <= 5: # Only log first 5 failures + _LOGGER.warning("Failed to demangle: %s", original) + + if failed_count == 0: + _LOGGER.info("Successfully demangled all %d symbols", len(symbols)) + return + + _LOGGER.warning( + "Failed to demangle %d/%d symbols using %s", + failed_count, + len(symbols), + cppfilt_cmd, + ) + + @staticmethod + def _restore_symbol_prefix(prefix: str, stripped: str, demangled: str) -> str: + """Restore prefix that was removed before demangling. + + Args: + prefix: Prefix that was removed (e.g., "_GLOBAL__sub_I_") + stripped: Stripped symbol name + demangled: Demangled symbol name + + Returns: + Demangled name with prefix restored/annotated + """ + if not prefix: + return demangled + + # Successfully demangled - add descriptive prefix + if demangled != stripped and ( + annotation := _GCC_PREFIX_ANNOTATIONS.get(prefix) + ): + return f"[{annotation}: {demangled}]" + + # Failed to demangle - restore original prefix + return prefix + demangled + + @staticmethod + def _restore_symbol_suffix(original: str, demangled: str) -> str: + """Restore GCC optimization suffix that was removed before demangling. 
+ + Args: + original: Original symbol name with suffix + demangled: Demangled symbol name without suffix + + Returns: + Demangled name with suffix annotation + """ + if suffix_match := _GCC_OPTIMIZATION_SUFFIX_PATTERN.search(original): + return f"{demangled} [{suffix_match.group(1)}]" + return demangled + + def _demangle_symbol(self, symbol: str) -> str: + """Get demangled C++ symbol name from cache.""" + return self._demangle_cache.get(symbol, symbol) + + def _categorize_esphome_core_symbol(self, demangled: str) -> str: + """Categorize ESPHome core symbols into subcategories.""" + # Special patterns that need to be checked separately + if any(pattern in demangled for pattern in _CPP_RUNTIME_PATTERNS): + return "C++ Runtime (vtables/RTTI)" + + if demangled.startswith(_NAMESPACE_STD): + return "C++ STL" + + # Check against patterns from const.py + for category, patterns in CORE_SUBCATEGORY_PATTERNS.items(): + if any(pattern in demangled for pattern in patterns): + return category + + return "Other Core" + + +if __name__ == "__main__": + from .cli import main + + main() diff --git a/esphome/analyze_memory/__main__.py b/esphome/analyze_memory/__main__.py new file mode 100644 index 0000000000..aa772c3ad4 --- /dev/null +++ b/esphome/analyze_memory/__main__.py @@ -0,0 +1,6 @@ +"""Main entry point for running the memory analyzer as a module.""" + +from .cli import main + +if __name__ == "__main__": + main() diff --git a/esphome/analyze_memory/cli.py b/esphome/analyze_memory/cli.py new file mode 100644 index 0000000000..718f42330d --- /dev/null +++ b/esphome/analyze_memory/cli.py @@ -0,0 +1,421 @@ +"""CLI interface for memory analysis with report generation.""" + +from collections import defaultdict +import sys + +from . import ( + _COMPONENT_API, + _COMPONENT_CORE, + _COMPONENT_PREFIX_ESPHOME, + _COMPONENT_PREFIX_EXTERNAL, + MemoryAnalyzer, +) + + +class MemoryAnalyzerCLI(MemoryAnalyzer): + """Memory analyzer with CLI-specific report generation.""" + + # Column width constants + COL_COMPONENT: int = 29 + COL_FLASH_TEXT: int = 14 + COL_FLASH_DATA: int = 14 + COL_RAM_DATA: int = 12 + COL_RAM_BSS: int = 12 + COL_TOTAL_FLASH: int = 15 + COL_TOTAL_RAM: int = 12 + COL_SEPARATOR: int = 3 # " | " + + # Core analysis column widths + COL_CORE_SUBCATEGORY: int = 30 + COL_CORE_SIZE: int = 12 + COL_CORE_COUNT: int = 6 + COL_CORE_PERCENT: int = 10 + + # Calculate table width once at class level + TABLE_WIDTH: int = ( + COL_COMPONENT + + COL_SEPARATOR + + COL_FLASH_TEXT + + COL_SEPARATOR + + COL_FLASH_DATA + + COL_SEPARATOR + + COL_RAM_DATA + + COL_SEPARATOR + + COL_RAM_BSS + + COL_SEPARATOR + + COL_TOTAL_FLASH + + COL_SEPARATOR + + COL_TOTAL_RAM + ) + + @staticmethod + def _make_separator_line(*widths: int) -> str: + """Create a separator line with given column widths. 
+ + Args: + widths: Column widths to create separators for + + Returns: + Separator line like "----+---------+-----" + """ + return "-+-".join("-" * width for width in widths) + + # Pre-computed separator lines + MAIN_TABLE_SEPARATOR: str = _make_separator_line( + COL_COMPONENT, + COL_FLASH_TEXT, + COL_FLASH_DATA, + COL_RAM_DATA, + COL_RAM_BSS, + COL_TOTAL_FLASH, + COL_TOTAL_RAM, + ) + + CORE_TABLE_SEPARATOR: str = _make_separator_line( + COL_CORE_SUBCATEGORY, + COL_CORE_SIZE, + COL_CORE_COUNT, + COL_CORE_PERCENT, + ) + + def generate_report(self, detailed: bool = False) -> str: + """Generate a formatted memory report.""" + components = sorted( + self.components.items(), key=lambda x: x[1].flash_total, reverse=True + ) + + # Calculate totals + total_flash = sum(c.flash_total for _, c in components) + total_ram = sum(c.ram_total for _, c in components) + + # Build report + lines: list[str] = [] + + lines.append("=" * self.TABLE_WIDTH) + lines.append("Component Memory Analysis".center(self.TABLE_WIDTH)) + lines.append("=" * self.TABLE_WIDTH) + lines.append("") + + # Main table - fixed column widths + lines.append( + f"{'Component':<{self.COL_COMPONENT}} | {'Flash (text)':>{self.COL_FLASH_TEXT}} | {'Flash (data)':>{self.COL_FLASH_DATA}} | {'RAM (data)':>{self.COL_RAM_DATA}} | {'RAM (bss)':>{self.COL_RAM_BSS}} | {'Total Flash':>{self.COL_TOTAL_FLASH}} | {'Total RAM':>{self.COL_TOTAL_RAM}}" + ) + lines.append(self.MAIN_TABLE_SEPARATOR) + + for name, mem in components: + if mem.flash_total > 0 or mem.ram_total > 0: + flash_rodata = mem.rodata_size + mem.data_size + lines.append( + f"{name:<{self.COL_COMPONENT}} | {mem.text_size:>{self.COL_FLASH_TEXT - 2},} B | {flash_rodata:>{self.COL_FLASH_DATA - 2},} B | " + f"{mem.data_size:>{self.COL_RAM_DATA - 2},} B | {mem.bss_size:>{self.COL_RAM_BSS - 2},} B | " + f"{mem.flash_total:>{self.COL_TOTAL_FLASH - 2},} B | {mem.ram_total:>{self.COL_TOTAL_RAM - 2},} B" + ) + + lines.append(self.MAIN_TABLE_SEPARATOR) + lines.append( + f"{'TOTAL':<{self.COL_COMPONENT}} | {' ':>{self.COL_FLASH_TEXT}} | {' ':>{self.COL_FLASH_DATA}} | " + f"{' ':>{self.COL_RAM_DATA}} | {' ':>{self.COL_RAM_BSS}} | " + f"{total_flash:>{self.COL_TOTAL_FLASH - 2},} B | {total_ram:>{self.COL_TOTAL_RAM - 2},} B" + ) + + # Top consumers + lines.append("") + lines.append("Top Flash Consumers:") + for i, (name, mem) in enumerate(components[:25]): + if mem.flash_total > 0: + percentage = ( + (mem.flash_total / total_flash * 100) if total_flash > 0 else 0 + ) + lines.append( + f"{i + 1}. {name} ({mem.flash_total:,} B) - {percentage:.1f}% of analyzed flash" + ) + + lines.append("") + lines.append("Top RAM Consumers:") + ram_components = sorted(components, key=lambda x: x[1].ram_total, reverse=True) + for i, (name, mem) in enumerate(ram_components[:25]): + if mem.ram_total > 0: + percentage = (mem.ram_total / total_ram * 100) if total_ram > 0 else 0 + lines.append( + f"{i + 1}. {name} ({mem.ram_total:,} B) - {percentage:.1f}% of analyzed RAM" + ) + + lines.append("") + lines.append( + "Note: This analysis covers symbols in the ELF file. Some runtime allocations may not be included." 
+ ) + lines.append("=" * self.TABLE_WIDTH) + + # Add ESPHome core detailed analysis if there are core symbols + if self._esphome_core_symbols: + lines.append("") + lines.append("=" * self.TABLE_WIDTH) + lines.append( + f"{_COMPONENT_CORE} Detailed Analysis".center(self.TABLE_WIDTH) + ) + lines.append("=" * self.TABLE_WIDTH) + lines.append("") + + # Group core symbols by subcategory + core_subcategories: dict[str, list[tuple[str, str, int]]] = defaultdict( + list + ) + + for symbol, demangled, size in self._esphome_core_symbols: + # Categorize based on demangled name patterns + subcategory = self._categorize_esphome_core_symbol(demangled) + core_subcategories[subcategory].append((symbol, demangled, size)) + + # Sort subcategories by total size + sorted_subcategories = sorted( + [ + (name, symbols, sum(s[2] for s in symbols)) + for name, symbols in core_subcategories.items() + ], + key=lambda x: x[2], + reverse=True, + ) + + lines.append( + f"{'Subcategory':<{self.COL_CORE_SUBCATEGORY}} | {'Size':>{self.COL_CORE_SIZE}} | " + f"{'Count':>{self.COL_CORE_COUNT}} | {'% of Core':>{self.COL_CORE_PERCENT}}" + ) + lines.append(self.CORE_TABLE_SEPARATOR) + + core_total = sum(size for _, _, size in self._esphome_core_symbols) + + for subcategory, symbols, total_size in sorted_subcategories: + percentage = (total_size / core_total * 100) if core_total > 0 else 0 + lines.append( + f"{subcategory:<{self.COL_CORE_SUBCATEGORY}} | {total_size:>{self.COL_CORE_SIZE - 2},} B | " + f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%" + ) + + # Top 15 largest core symbols + lines.append("") + lines.append(f"Top 15 Largest {_COMPONENT_CORE} Symbols:") + sorted_core_symbols = sorted( + self._esphome_core_symbols, key=lambda x: x[2], reverse=True + ) + + for i, (symbol, demangled, size) in enumerate(sorted_core_symbols[:15]): + lines.append(f"{i + 1}. 
{demangled} ({size:,} B)") + + lines.append("=" * self.TABLE_WIDTH) + + # Add detailed analysis for top ESPHome and external components + esphome_components = [ + (name, mem) + for name, mem in components + if name.startswith(_COMPONENT_PREFIX_ESPHOME) and name != _COMPONENT_CORE + ] + external_components = [ + (name, mem) + for name, mem in components + if name.startswith(_COMPONENT_PREFIX_EXTERNAL) + ] + + top_esphome_components = sorted( + esphome_components, key=lambda x: x[1].flash_total, reverse=True + )[:30] + + # Include all external components (they're usually important) + top_external_components = sorted( + external_components, key=lambda x: x[1].flash_total, reverse=True + ) + + # Check if API component exists and ensure it's included + api_component = None + for name, mem in components: + if name == _COMPONENT_API: + api_component = (name, mem) + break + + # Also include wifi_stack and other important system components if they exist + system_components_to_include = [ + # Empty list - we've finished debugging symbol categorization + # Add component names here if you need to debug their symbols + ] + system_components = [ + (name, mem) + for name, mem in components + if name in system_components_to_include + ] + + # Combine all components to analyze: top ESPHome + all external + API if not already included + system components + components_to_analyze = ( + list(top_esphome_components) + + list(top_external_components) + + system_components + ) + if api_component and api_component not in components_to_analyze: + components_to_analyze.append(api_component) + + if components_to_analyze: + for comp_name, comp_mem in components_to_analyze: + if not (comp_symbols := self._component_symbols.get(comp_name, [])): + continue + lines.append("") + lines.append("=" * self.TABLE_WIDTH) + lines.append(f"{comp_name} Detailed Analysis".center(self.TABLE_WIDTH)) + lines.append("=" * self.TABLE_WIDTH) + lines.append("") + + # Sort symbols by size + sorted_symbols = sorted(comp_symbols, key=lambda x: x[2], reverse=True) + + lines.append(f"Total symbols: {len(sorted_symbols)}") + lines.append(f"Total size: {comp_mem.flash_total:,} B") + lines.append("") + + # Show all symbols > 100 bytes for better visibility + large_symbols = [ + (sym, dem, size) for sym, dem, size in sorted_symbols if size > 100 + ] + + lines.append( + f"{comp_name} Symbols > 100 B ({len(large_symbols)} symbols):" + ) + for i, (symbol, demangled, size) in enumerate(large_symbols): + lines.append(f"{i + 1}. {demangled} ({size:,} B)") + + lines.append("=" * self.TABLE_WIDTH) + + return "\n".join(lines) + + def dump_uncategorized_symbols(self, output_file: str | None = None) -> None: + """Dump uncategorized symbols for analysis.""" + # Sort by size descending + sorted_symbols = sorted( + self._uncategorized_symbols, key=lambda x: x[2], reverse=True + ) + + lines = ["Uncategorized Symbols Analysis", "=" * 80] + lines.append(f"Total uncategorized symbols: {len(sorted_symbols)}") + lines.append( + f"Total uncategorized size: {sum(s[2] for s in sorted_symbols):,} bytes" + ) + lines.append("") + lines.append(f"{'Size':>10} | {'Symbol':<60} | Demangled") + lines.append("-" * 10 + "-+-" + "-" * 60 + "-+-" + "-" * 40) + + for symbol, demangled, size in sorted_symbols[:100]: # Top 100 + demangled_display = ( + demangled[:100] if symbol != demangled else "[not demangled]" + ) + lines.append(f"{size:>10,} | {symbol[:60]:<60} | {demangled_display}") + + if len(sorted_symbols) > 100: + lines.append(f"\n... 
and {len(sorted_symbols) - 100} more symbols") + + content = "\n".join(lines) + + if output_file: + with open(output_file, "w", encoding="utf-8") as f: + f.write(content) + else: + print(content) + + +def analyze_elf( + elf_path: str, + objdump_path: str | None = None, + readelf_path: str | None = None, + detailed: bool = False, + external_components: set[str] | None = None, +) -> str: + """Analyze an ELF file and return a memory report.""" + analyzer = MemoryAnalyzerCLI( + elf_path, objdump_path, readelf_path, external_components + ) + analyzer.analyze() + return analyzer.generate_report(detailed) + + +def main(): + """CLI entrypoint for memory analysis.""" + if len(sys.argv) < 2: + print("Usage: python -m esphome.analyze_memory ") + print("\nAnalyze memory usage from an ESPHome build directory.") + print("The build directory should contain firmware.elf and idedata will be") + print("loaded from ~/.esphome/.internal/idedata/.json") + print("\nExamples:") + print(" python -m esphome.analyze_memory ~/.esphome/build/my-device") + print(" python -m esphome.analyze_memory .esphome/build/my-device") + print(" python -m esphome.analyze_memory my-device # Short form") + sys.exit(1) + + build_dir = sys.argv[1] + + # Load build directory + import json + from pathlib import Path + + from esphome.platformio_api import IDEData + + build_path = Path(build_dir) + + # If no path separator in name, assume it's a device name + if "/" not in build_dir and not build_path.is_dir(): + # Try current directory first + cwd_path = Path.cwd() / ".esphome" / "build" / build_dir + if cwd_path.is_dir(): + build_path = cwd_path + print(f"Using build directory: {build_path}", file=sys.stderr) + else: + # Fall back to home directory + build_path = Path.home() / ".esphome" / "build" / build_dir + print(f"Using build directory: {build_path}", file=sys.stderr) + + if not build_path.is_dir(): + print(f"Error: {build_path} is not a directory", file=sys.stderr) + sys.exit(1) + + # Find firmware.elf + elf_file = None + for elf_candidate in [ + build_path / "firmware.elf", + build_path / ".pioenvs" / build_path.name / "firmware.elf", + ]: + if elf_candidate.exists(): + elf_file = str(elf_candidate) + break + + if not elf_file: + print(f"Error: firmware.elf not found in {build_dir}", file=sys.stderr) + sys.exit(1) + + # Find idedata.json - check current directory first, then home + device_name = build_path.name + idedata_candidates = [ + Path.cwd() / ".esphome" / "idedata" / f"{device_name}.json", + Path.home() / ".esphome" / "idedata" / f"{device_name}.json", + ] + + idedata = None + for idedata_path in idedata_candidates: + if not idedata_path.exists(): + continue + try: + with open(idedata_path, encoding="utf-8") as f: + raw_data = json.load(f) + idedata = IDEData(raw_data) + print(f"Loaded idedata from: {idedata_path}", file=sys.stderr) + break + except (json.JSONDecodeError, OSError) as e: + print(f"Warning: Failed to load idedata: {e}", file=sys.stderr) + + if not idedata: + print( + f"Warning: idedata not found (searched {idedata_candidates[0]} and {idedata_candidates[1]})", + file=sys.stderr, + ) + + analyzer = MemoryAnalyzerCLI(elf_file, idedata=idedata) + analyzer.analyze() + report = analyzer.generate_report() + print(report) + + +if __name__ == "__main__": + main() diff --git a/esphome/analyze_memory/const.py b/esphome/analyze_memory/const.py new file mode 100644 index 0000000000..78af82059f --- /dev/null +++ b/esphome/analyze_memory/const.py @@ -0,0 +1,1052 @@ +"""Constants for memory analysis symbol pattern 
matching.""" + +import re + +# Pattern to extract ESPHome component namespaces dynamically +ESPHOME_COMPONENT_PATTERN = re.compile(r"esphome::([a-zA-Z0-9_]+)::") + +# Section mapping for ELF file sections +# Maps standard section names to their various platform-specific variants +SECTION_MAPPING = { + ".text": frozenset([".text", ".iram"]), + ".rodata": frozenset([".rodata"]), + ".data": frozenset([".data", ".dram"]), + ".bss": frozenset([".bss"]), +} + +# Section to ComponentMemory attribute mapping +# Maps section names to the attribute name in ComponentMemory dataclass +SECTION_TO_ATTR = { + ".text": "text_size", + ".rodata": "rodata_size", + ".data": "data_size", + ".bss": "bss_size", +} + +# Component identification rules +# Symbol patterns: patterns found in raw symbol names +SYMBOL_PATTERNS = { + "freertos": [ + "vTask", + "xTask", + "xQueue", + "pvPort", + "vPort", + "uxTask", + "pcTask", + "prvTimerTask", + "prvAddNewTaskToReadyList", + "pxReadyTasksLists", + "prvAddCurrentTaskToDelayedList", + "xEventGroupWaitBits", + "xRingbufferSendFromISR", + "prvSendItemDoneNoSplit", + "prvReceiveGeneric", + "prvSendAcquireGeneric", + "prvCopyItemAllowSplit", + "xEventGroup", + "xRingbuffer", + "prvSend", + "prvReceive", + "prvCopy", + "xPort", + "ulTaskGenericNotifyTake", + "prvIdleTask", + "prvInitialiseNewTask", + "prvIsYieldRequiredSMP", + "prvGetItemByteBuf", + "prvInitializeNewRingbuffer", + "prvAcquireItemNoSplit", + "prvNotifyQueueSetContainer", + "ucStaticTimerQueueStorage", + "eTaskGetState", + "main_task", + "do_system_init_fn", + "xSemaphoreCreateGenericWithCaps", + "vListInsert", + "uxListRemove", + "vRingbufferReturnItem", + "vRingbufferReturnItemFromISR", + "prvCheckItemFitsByteBuffer", + "prvGetCurMaxSizeAllowSplit", + "tick_hook", + "sys_sem_new", + "sys_arch_mbox_fetch", + "sys_arch_sem_wait", + "prvDeleteTCB", + "vQueueDeleteWithCaps", + "vRingbufferDeleteWithCaps", + "vSemaphoreDeleteWithCaps", + "prvCheckItemAvail", + "prvCheckTaskCanBeScheduledSMP", + "prvGetCurMaxSizeNoSplit", + "prvResetNextTaskUnblockTime", + "prvReturnItemByteBuf", + "vApplicationStackOverflowHook", + "vApplicationGetIdleTaskMemory", + "sys_init", + "sys_mbox_new", + "sys_arch_mbox_tryfetch", + ], + "xtensa": ["xt_", "_xt_", "xPortEnterCriticalTimeout"], + "heap": ["heap_", "multi_heap"], + "spi_flash": ["spi_flash"], + "rtc": ["rtc_", "rtcio_ll_"], + "gpio_driver": ["gpio_", "pins"], + "uart_driver": ["uart", "_uart", "UART"], + "timer": ["timer_", "esp_timer"], + "peripherals": ["periph_", "periman"], + "network_stack": [ + "vj_compress", + "raw_sendto", + "raw_input", + "etharp_", + "icmp_input", + "socket_ipv6", + "ip_napt", + "socket_ipv4_multicast", + "socket_ipv6_multicast", + "netconn_", + "recv_raw", + "accept_function", + "netconn_recv_data", + "netconn_accept", + "netconn_write_vectors_partly", + "netconn_drain", + "raw_connect", + "raw_bind", + "icmp_send_response", + "sockets", + "icmp_dest_unreach", + "inet_chksum_pseudo", + "alloc_socket", + "done_socket", + "set_global_fd_sets", + "inet_chksum_pbuf", + "tryget_socket_unconn_locked", + "tryget_socket_unconn", + "cs_create_ctrl_sock", + "netbuf_alloc", + "tcp_", # TCP protocol functions + "udp_", # UDP protocol functions + "lwip_", # LwIP stack functions + "eagle_lwip", # ESP-specific LwIP functions + "new_linkoutput", # Link output function + "acd_", # Address Conflict Detection (ACD) + "eth_", # Ethernet functions + "mac_enable_bb", # MAC baseband enable + "reassemble_and_dispatch", # Packet reassembly + ], + # dhcp must come before 
libc to avoid "dhcp_select" matching "select" pattern + "dhcp": ["dhcp", "handle_dhcp"], + "ipv6_stack": ["nd6_", "ip6_", "mld6_", "icmp6_", "icmp6_input"], + # Order matters! More specific categories must come before general ones. + # mdns must come before bluetooth to avoid "_mdns_disable_pcb" matching "ble_" pattern + "mdns_lib": ["mdns"], + # memory_mgmt must come before wifi_stack to catch mmu_hal_* symbols + "memory_mgmt": [ + "mem_", + "memory_", + "tlsf_", + "memp_", + "pbuf_", + "pbuf_alloc", + "pbuf_copy_partial_pbuf", + "esp_mmu_map", + "mmu_hal_", + "s_do_mapping", # Memory mapping function, not WiFi + "hash_map_", # Hash map data structure + "umm_assimilate", # UMM malloc assimilation + ], + # Bluetooth categories must come BEFORE wifi_stack to avoid misclassification + # Many BLE symbols contain patterns like "ble_" that would otherwise match wifi patterns + "bluetooth_rom": ["r_ble", "r_lld", "r_llc", "r_llm"], + "bluedroid_bt": [ + "bluedroid", + "btc_", + "bta_", + "btm_", + "btu_", + "BTM_", + "GATT", + "L2CA_", + "smp_", + "gatts_", + "attp_", + "l2cu_", + "l2cb", + "smp_cb", + "BTA_GATTC_", + "SMP_", + "BTU_", + "BTA_Dm", + "GAP_Ble", + "BT_tx_if", + "host_recv_pkt_cb", + "saved_local_oob_data", + "string_to_bdaddr", + "string_is_bdaddr", + "CalConnectParamTimeout", + "transmit_fragment", + "transmit_data", + "event_command_ready", + "read_command_complete_header", + "parse_read_local_extended_features_response", + "parse_read_local_version_info_response", + "should_request_high", + "btdm_wakeup_request", + "BTA_SetAttributeValue", + "BTA_EnableBluetooth", + "transmit_command_futured", + "transmit_command", + "get_waiting_command", + "make_command", + "transmit_downward", + "host_recv_adv_packet", + "copy_extra_byte_in_db", + "parse_read_local_supported_commands_response", + ], + "bluetooth": [ + "bt_", + "_ble_", # More specific than "ble_" to avoid matching "able_", "enable_", "disable_" + "l2c_", + "l2ble_", # L2CAP for BLE + "gatt_", + "gap_", + "hci_", + "btsnd_hcic_", # Bluetooth HCI command send functions + "BT_init", + "BT_tx_", # Bluetooth transmit functions + "esp_ble_", # Catch esp_ble_* functions + ], + "bluetooth_ll": [ + "llm_", # Link layer manager + "llc_", # Link layer control + "lld_", # Link layer driver + "ld_acl_", # Link layer ACL (Asynchronous Connection-Oriented) + "llcp_", # Link layer control protocol + "lmp_", # Link manager protocol + ], + "wifi_bt_coex": ["coex"], + "wifi_stack": [ + "ieee80211", + "hostap", + "sta_", + "wifi_ap_", # More specific than "ap_" to avoid matching "cap_", "map_" + "wifi_scan_", # More specific than "scan_" to avoid matching "_scan_" in other contexts + "wifi_", + "wpa_", + "wps_", + "esp_wifi", + "cnx_", + "wpa3_", + "sae_", + "wDev_", + "ic_mac_", # More specific than "mac_" to avoid matching emac_ + "esf_buf", + "gWpaSm", + "sm_WPA", + "eapol_", + "owe_", + "wifiLowLevelInit", + # Removed "s_do_mapping" - this is memory management, not WiFi + "gScanStruct", + "ppSearchTxframe", + "ppMapWaitTxq", + "ppFillAMPDUBar", + "ppCheckTxConnTrafficIdle", + "ppCalTkipMic", + "phy_force_wifi", + "phy_unforce_wifi", + "write_wifi_chan", + "wifi_track_pll", + ], + "crypto_math": [ + "ecp_", + "bignum_", + "mpi_", + "sswu", + "modp", + "dragonfly_", + "gcm_mult", + "__multiply", + "quorem", + "__mdiff", + "__lshift", + "__mprec_tens", + "ECC_", + "multiprecision_", + "mix_sub_columns", + "sbox", + "gfm2_sbox", + "gfm3_sbox", + "curve_p256", + "curve", + "p_256_init_curve", + "shift_sub_rows", + "rshift", + "rijndaelEncrypt", 
# AES Rijndael encryption + ], + # System and Arduino core functions must come before libc + "esp_system": [ + "system_", # ESP system functions + "postmortem_", # Postmortem reporting + ], + "arduino_core": [ + "pinMode", + "resetPins", + "millis", + "micros", + "delay(", # More specific - Arduino delay function with parenthesis + "delayMicroseconds", + "digitalWrite", + "digitalRead", + ], + "sntp": ["sntp_", "sntp_recv"], + "scheduler": [ + "run_scheduled_", + "compute_scheduled_", + "event_TaskQueue", + ], + "hw_crypto": ["esp_aes", "esp_sha", "esp_rsa", "esp_bignum", "esp_mpi"], + "libc": [ + "printf", + "scanf", + "malloc", + "_free", # More specific than "free" to match _free, __free_r, etc. but not arbitrary "free" substring + "umm_free", # UMM malloc free function + "memcpy", + "memset", + "strcpy", + "strlen", + "_dtoa", + "_fopen", + "__sfvwrite_r", + "qsort", + "__sf", + "__sflush_r", + "__srefill_r", + "_impure_data", + "_reclaim_reent", + "_open_r", + "strncpy", + "_strtod_l", + "__gethex", + "__hexnan", + "_setenv_r", + "_tzset_unlocked_r", + "__tzcalc_limits", + "_select", # More specific than "select" to avoid matching "dhcp_select", etc. + "scalbnf", + "strtof", + "strtof_l", + "__d2b", + "__b2d", + "__s2b", + "_Balloc", + "__multadd", + "__lo0bits", + "__atexit0", + "__smakebuf_r", + "__swhatbuf_r", + "_sungetc_r", + "_close_r", + "_link_r", + "_unsetenv_r", + "_rename_r", + "__month_lengths", + "tzinfo", + "__ratio", + "__hi0bits", + "__ulp", + "__any_on", + "__copybits", + "L_shift", + "_fcntl_r", + "_lseek_r", + "_read_r", + "_write_r", + "_unlink_r", + "_fstat_r", + "access", + "fsync", + "tcsetattr", + "tcgetattr", + "tcflush", + "tcdrain", + "__ssrefill_r", + "_stat_r", + "__hexdig_fun", + "__mcmp", + "_fwalk_sglue", + "__fpclassifyf", + "_setlocale_r", + "_mbrtowc_r", + "fcntl", + "__match", + "_lock_close", + "__c$", + "__func__$", + "__FUNCTION__$", + "DAYS_IN_MONTH", + "_DAYS_BEFORE_MONTH", + "CSWTCH$", + "dst$", + "sulp", + "_strtol_l", # String to long with locale + "__cvt", # Convert + "__utoa", # Unsigned to ASCII + "__global_locale", # Global locale + "_ctype_", # Character type + "impure_data", # Impure data + ], + "string_ops": [ + "strcmp", + "strncmp", + "strchr", + "strstr", + "strtok", + "strdup", + "strncasecmp_P", # String compare (case insensitive, from program memory) + "strnlen_P", # String length (from program memory) + "strncat_P", # String concatenate (from program memory) + ], + "memory_alloc": ["malloc", "calloc", "realloc", "free", "_sbrk"], + "file_io": [ + "fread", + "fwrite", + "fopen", + "fclose", + "fseek", + "ftell", + "fflush", + "s_fd_table", + ], + "string_formatting": [ + "snprintf", + "vsnprintf", + "sprintf", + "vsprintf", + "sscanf", + "vsscanf", + ], + "cpp_anonymous": ["_GLOBAL__N_", "n$"], + # Plain C patterns only - C++ symbols will be categorized via DEMANGLED_PATTERNS + "nvs": ["nvs_"], # Plain C NVS functions + "ota": ["ota_", "OTA", "esp_ota", "app_desc"], + # cpp_runtime: Removed _ZN, _ZL to let DEMANGLED_PATTERNS categorize C++ symbols properly + # Only keep patterns that are truly runtime-specific and not categorizable by namespace + "cpp_runtime": ["__cxx", "_ZSt", "__gxx_personality", "_Z16"], + "exception_handling": [ + "__cxa_", + "_Unwind_", + "__gcc_personality", + "uw_frame_state", + "search_object", # Search for exception handling object + "get_cie_encoding", # Get CIE encoding + "add_fdes", # Add frame description entries + "fde_unencoded_compare", # Compare FDEs + "fde_mixed_encoding_compare", # Compare 
mixed encoding FDEs + "frame_downheap", # Frame heap operations + "frame_heapsort", # Frame heap sorting + ], + "static_init": ["_GLOBAL__sub_I_"], + "phy_radio": [ + "phy_", + "rf_", + "chip_", + "register_chipv7", + "pbus_", + "bb_", + "fe_", + "rfcal_", + "ram_rfcal", + "tx_pwctrl", + "rx_chan", + "set_rx_gain", + "set_chan", + "agc_reg", + "ram_txiq", + "ram_txdc", + "ram_gen_rx_gain", + "rx_11b_opt", + "set_rx_sense", + "set_rx_gain_cal", + "set_chan_dig_gain", + "tx_pwctrl_init_cal", + "rfcal_txiq", + "set_tx_gain_table", + "correct_rfpll_offset", + "pll_correct_dcap", + "txiq_cal_init", + "pwdet_sar", + "pwdet_sar2_init", + "ram_iq_est_enable", + "ram_rfpll_set_freq", + "ant_wifirx_cfg", + "ant_btrx_cfg", + "force_txrxoff", + "force_txrx_off", + "tx_paon_set", + "opt_11b_resart", + "rfpll_1p2_opt", + "ram_dc_iq_est", + "ram_start_tx_tone", + "ram_en_pwdet", + "ram_cbw2040_cfg", + "rxdc_est_min", + "i2cmst_reg_init", + "temprature_sens_read", + "ram_restart_cal", + "ram_write_gain_mem", + "ram_wait_rfpll_cal_end", + "txcal_debuge_mode", + "ant_wifitx_cfg", + "reg_init_begin", + "tx_cap_init", # TX capacitance init + "ram_set_txcap", # RAM TX capacitance setting + "tx_atten_", # TX attenuation + "txiq_", # TX I/Q calibration + "ram_cal_", # RAM calibration + "ram_rxiq_", # RAM RX I/Q + "readvdd33", # Read VDD33 + "test_tout", # Test timeout + "tsen_meas", # Temperature sensor measurement + "bbpll_cal", # Baseband PLL calibration + "set_cal_", # Set calibration + "set_rfanagain_", # Set RF analog gain + "set_txdc_", # Set TX DC + "get_vdd33_", # Get VDD33 + "gen_rx_gain_table", # Generate RX gain table + "ram_ana_inf_gating_en", # RAM analog interface gating enable + "tx_cont_en", # TX continuous enable + "tx_delay_cfg", # TX delay configuration + "tx_gain_table_set", # TX gain table set + "check_and_reset_hw_deadlock", # Hardware deadlock check + "s_config", # System/hardware config + "chan14_mic_cfg", # Channel 14 MIC config + ], + "wifi_phy_pp": [ + "pp_", + "ppT", + "ppR", + "ppP", + "ppInstall", + "ppCalTxAMPDULength", + "ppCheckTx", # Packet processor TX check + "ppCal", # Packet processor calibration + "HdlAllBuffedEb", # Handle buffered EB + ], + "wifi_lmac": ["lmac"], + "wifi_device": [ + "wdev", + "wDev_", + "ic_set_sta", # Set station mode + "ic_set_vif", # Set virtual interface + ], + "power_mgmt": [ + "pm_", + "sleep", + "rtc_sleep", + "light_sleep", + "deep_sleep", + "power_down", + "g_pm", + "pmc", # Power Management Controller + ], + "hal_layer": ["hal_"], + "clock_mgmt": [ + "clk_", + "clock_", + "rtc_clk", + "apb_", + "cpu_freq", + "setCpuFrequencyMhz", + ], + "cache_mgmt": ["cache"], + "flash_ops": ["flash", "image_load"], + "interrupt_handlers": [ + "isr", + "interrupt", + "intr_", + "exc_", + "exception", + "port_IntStack", + ], + "wrapper_functions": ["_wrapper"], + "error_handling": ["panic", "abort", "assert", "error_", "fault"], + "authentication": ["auth"], + "ppp_protocol": ["ppp", "ipcp_", "lcp_", "chap_", "LcpEchoCheck"], + "ethernet_phy": [ + "emac_", + "eth_phy_", + "phy_tlk110", + "phy_lan87", + "phy_ip101", + "phy_rtl", + "phy_dp83", + "phy_ksz", + "lan87xx_", + "rtl8201_", + "ip101_", + "ksz80xx_", + "jl1101_", + "dp83848_", + "eth_on_state_changed", + ], + "threading": ["pthread_", "thread_", "_task_"], + "pthread": ["pthread"], + "synchronization": ["mutex", "semaphore", "spinlock", "portMUX"], + "math_lib": [ + "sin", + "cos", + "tan", + "sqrt", + "pow", + "exp", + "log", + "atan", + "asin", + "acos", + "floor", + "ceil", + "fabs", + "round", + ], + 
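+    # Note on matching (added for clarity): raw symbol names are checked against these
+    # substrings in dict order and the first category that matches wins, which is why the
+    # ordering comments above matter. Illustrative (hypothetical) symbol: "uart_hal_set_baudrate"
+    # contains both "uart" (uart_driver) and "hal_" (hal_layer); since uart_driver is listed
+    # earlier, that is the category assigned.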
"random": ["rand", "random", "rng_", "prng"], + "time_lib": [ + "time", + "clock", + "gettimeofday", + "settimeofday", + "localtime", + "gmtime", + "mktime", + "strftime", + ], + "console_io": ["console_", "uart_tx", "uart_rx", "puts", "putchar", "getchar"], + "rom_functions": ["r_", "rom_"], + "compiler_runtime": [ + "__divdi3", + "__udivdi3", + "__moddi3", + "__muldi3", + "__ashldi3", + "__ashrdi3", + "__lshrdi3", + "__cmpdi2", + "__fixdfdi", + "__floatdidf", + ], + "libgcc": ["libgcc", "_divdi3", "_udivdi3"], + "boot_startup": ["boot", "start_cpu", "call_start", "startup", "bootloader"], + "bootloader": ["bootloader_", "esp_bootloader"], + "app_framework": ["app_", "initArduino", "setup", "loop", "Update"], + "weak_symbols": ["__weak_"], + "compiler_builtins": ["__builtin_"], + "vfs": ["vfs_", "VFS"], + "esp32_sdk": ["esp32_", "esp32c", "esp32s"], + "usb": ["usb_", "USB", "cdc_", "CDC"], + "i2c_driver": ["i2c_", "I2C"], + "i2s_driver": ["i2s_", "I2S"], + "spi_driver": ["spi_", "SPI"], + "adc_driver": ["adc_", "ADC"], + "dac_driver": ["dac_", "DAC"], + "touch_driver": ["touch_", "TOUCH"], + "pwm_driver": ["pwm_", "PWM", "ledc_", "LEDC"], + "rmt_driver": ["rmt_", "RMT"], + "pcnt_driver": ["pcnt_", "PCNT"], + "can_driver": ["can_", "CAN", "twai_", "TWAI"], + "sdmmc_driver": ["sdmmc_", "SDMMC", "sdcard", "sd_card"], + "temp_sensor": ["temp_sensor", "tsens_"], + "watchdog": ["wdt_", "WDT", "watchdog"], + "brownout": ["brownout", "bod_"], + "ulp": ["ulp_", "ULP"], + "psram": ["psram", "PSRAM", "spiram", "SPIRAM"], + "efuse": ["efuse", "EFUSE"], + "partition": ["partition", "esp_partition"], + "esp_event": ["esp_event", "event_loop", "event_callback"], + "esp_console": ["esp_console", "console_"], + "chip_specific": ["chip_", "esp_chip"], + "esp_system_utils": ["esp_system", "esp_hw", "esp_clk", "esp_sleep"], + "ipc": ["esp_ipc", "ipc_"], + "wifi_config": [ + "g_cnxMgr", + "gChmCxt", + "g_ic", + "TxRxCxt", + "s_dp", + "s_ni", + "s_reg_dump", + "packet$", + "d_mult_table", + "K", + "fcstab", + ], + "smartconfig": ["sc_ack_send"], + "rc_calibration": ["rc_cal", "rcUpdate"], + "noise_floor": ["noise_check"], + "rf_calibration": [ + "set_rx_sense", + "set_rx_gain_cal", + "set_chan_dig_gain", + "tx_pwctrl_init_cal", + "rfcal_txiq", + "set_tx_gain_table", + "correct_rfpll_offset", + "pll_correct_dcap", + "txiq_cal_init", + "pwdet_sar", + "rx_11b_opt", + ], + "wifi_crypto": [ + "pk_use_ecparams", + "process_segments", + "ccmp_", + "rc4_", + "aria_", + "mgf_mask", + "dh_group", + "ccmp_aad_nonce", + "ccmp_encrypt", + "rc4_skip", + "aria_sb1", + "aria_sb2", + "aria_is1", + "aria_is2", + "aria_sl", + "aria_a", + ], + "radio_control": ["fsm_input", "fsm_sconfreq"], + "pbuf": [ + "pbuf_", + ], + "event_group": ["xEventGroup"], + "ringbuffer": ["xRingbuffer", "prvSend", "prvReceive", "prvCopy"], + "provisioning": ["prov_", "prov_stop_and_notify"], + "scan": ["gScanStruct"], + "port": ["xPort"], + "elf_loader": [ + "elf_add", + "elf_add_note", + "elf_add_segment", + "process_image", + "read_encoded", + "read_encoded_value", + "read_encoded_value_with_base", + "process_image_header", + ], + "socket_api": [ + "sockets", + "netconn_", + "accept_function", + "recv_raw", + "socket_ipv4_multicast", + "socket_ipv6_multicast", + ], + "igmp": ["igmp_", "igmp_send", "igmp_input"], + "icmp6": ["icmp6_"], + "arp": ["arp_table"], + "ampdu": [ + "ampdu_", + "rcAmpdu", + "trc_onAmpduOp", + "rcAmpduLowerRate", + "ampdu_dispatch_upto", + ], + "ieee802_11": ["ieee802_11_", "ieee802_11_parse_elems"], + "rate_control": [ + 
"rssi_margin", + "rcGetSched", + "get_rate_fcc_index", + "rcGetRate", # Get rate + "rc_get_", # Rate control getters + "rc_set_", # Rate control setters + "rc_enable_", # Rate control enable functions + ], + "nan": ["nan_dp_", "nan_dp_post_tx", "nan_dp_delete_peer"], + "channel_mgmt": ["chm_init", "chm_set_current_channel"], + "trace": ["trc_init", "trc_onAmpduOp"], + "country_code": ["country_info", "country_info_24ghz"], + "multicore": ["do_multicore_settings"], + "Update_lib": ["Update"], + "stdio": [ + "__sf", + "__sflush_r", + "__srefill_r", + "_impure_data", + "_reclaim_reent", + "_open_r", + ], + "strncpy_ops": ["strncpy"], + "math_internal": ["__mdiff", "__lshift", "__mprec_tens", "quorem"], + "character_class": ["__chclass"], + "camellia": ["camellia_", "camellia_feistel"], + "crypto_tables": ["FSb", "FSb2", "FSb3", "FSb4"], + "event_buffer": ["g_eb_list_desc", "eb_space"], + "base_node": ["base_node_", "base_node_add_handler"], + "file_descriptor": ["s_fd_table"], + "tx_delay": ["tx_delay_cfg"], + "deinit": ["deinit_functions"], + "lcp_echo": ["LcpEchoCheck"], + "raw_api": ["raw_bind", "raw_connect"], + "checksum": ["process_checksum"], + "entry_management": ["add_entry"], + "esp_ota": ["esp_ota", "ota_", "read_otadata"], + "http_server": [ + "httpd_", + "parse_url_char", + "cb_headers_complete", + "delete_entry", + "validate_structure", + "config_save", + "config_new", + "verify_url", + "cb_url", + ], + "misc_system": [ + "alarm_cbs", + "start_up", + "tokens", + "unhex", + "osi_funcs_ro", + "enum_function", + "fragment_and_dispatch", + "alarm_set", + "osi_alarm_new", + "config_set_string", + "config_update_newest_section", + "config_remove_key", + "method_strings", + "interop_match", + "interop_database", + "__state_table", + "__action_table", + "s_stub_table", + "s_context", + "s_mmu_ctx", + "s_get_bus_mask", + "hli_queue_put", + "list_remove", + "list_delete", + "lock_acquire_generic", + "is_vect_desc_usable", + "io_mode_str", + "__c$20233", + "interface", + "read_id_core", + "subscribe_idle", + "unsubscribe_idle", + "s_clkout_handle", + "lock_release_generic", + "config_set_int", + "config_get_int", + "config_get_string", + "config_has_key", + "config_remove_section", + "osi_alarm_init", + "osi_alarm_deinit", + "fixed_queue_enqueue", + "fixed_queue_dequeue", + "fixed_queue_new", + "fixed_pkt_queue_enqueue", + "fixed_pkt_queue_new", + "list_append", + "list_prepend", + "list_insert_after", + "list_contains", + "list_get_node", + "hash_function_blob", + "cb_no_body", + "cb_on_body", + "profile_tab", + "get_arg", + "trim", + "buf$", + "process_appended_hash_and_sig$constprop$0", + "uuidType", + "allocate_svc_db_buf", + "_hostname_is_ours", + "s_hli_handlers", + "tick_cb", + "idle_cb", + "input", + "entry_find", + "section_find", + "find_bucket_entry_", + "config_has_section", + "hli_queue_create", + "hli_queue_get", + "hli_c_handler", + "future_ready", + "future_await", + "future_new", + "pkt_queue_enqueue", + "pkt_queue_dequeue", + "pkt_queue_cleanup", + "pkt_queue_create", + "pkt_queue_destroy", + "fixed_pkt_queue_dequeue", + "osi_alarm_cancel", + "osi_alarm_is_active", + "osi_sem_take", + "osi_event_create", + "osi_event_bind", + "alarm_cb_handler", + "list_foreach", + "list_back", + "list_front", + "list_clear", + "fixed_queue_try_peek_first", + "translate_path", + "get_idx", + "find_key", + "init", + "end", + "start", + "set_read_value", + "copy_address_list", + "copy_and_key", + "sdk_cfg_opts", + "leftshift_onebit", + "config_section_end", + "config_section_begin", + 
"find_entry_and_check_all_reset", + "image_validate", + "xPendingReadyList", + "vListInitialise", + "lock_init_generic", + "ant_bttx_cfg", + "ant_dft_cfg", + "cs_send_to_ctrl_sock", + "config_llc_util_funcs_reset", + "make_set_adv_report_flow_control", + "make_set_event_mask", + "raw_new", + "raw_remove", + "BTE_InitStack", + "parse_read_local_supported_features_response", + "__math_invalidf", + "tinytens", + "__mprec_tinytens", + "__mprec_bigtens", + "vRingbufferDelete", + "vRingbufferDeleteWithCaps", + "vRingbufferReturnItem", + "vRingbufferReturnItemFromISR", + "get_acl_data_size_ble", + "get_features_ble", + "get_features_classic", + "get_acl_packet_size_ble", + "get_acl_packet_size_classic", + "supports_extended_inquiry_response", + "supports_rssi_with_inquiry_results", + "supports_interlaced_inquiry_scan", + "supports_reading_remote_extended_features", + ], +} + +# Demangled patterns: patterns found in demangled C++ names +DEMANGLED_PATTERNS = { + "gpio_driver": ["GPIO"], + "uart_driver": ["UART"], + # mdns_lib must come before network_stack to avoid "udp" matching "_udpReadBuffer" in MDNSResponder + "mdns_lib": [ + "MDNSResponder", + "MDNSImplementation", + "MDNS", + ], + "network_stack": [ + "lwip", + "tcp", + "udp", + "ip4", + "ip6", + "dhcp", + "dns", + "netif", + "ethernet", + "ppp", + "slip", + "UdpContext", # UDP context class + "DhcpServer", # DHCP server class + ], + "arduino_core": [ + "String::", # Arduino String class + "Print::", # Arduino Print class + "HardwareSerial::", # Serial class + "IPAddress::", # IP address class + "EspClass::", # ESP class + "experimental::_SPI", # Experimental SPI + ], + "ota": [ + "UpdaterClass", + "Updater::", + ], + "wifi": [ + "ESP8266WiFi", + "WiFi::", + ], + "wifi_stack": ["NetworkInterface"], + "nimble_bt": [ + "nimble", + "NimBLE", + "ble_hs", + "ble_gap", + "ble_gatt", + "ble_att", + "ble_l2cap", + "ble_sm", + ], + "crypto": ["mbedtls", "crypto", "sha", "aes", "rsa", "ecc", "tls", "ssl"], + "cpp_stdlib": ["std::", "__gnu_cxx::", "__cxxabiv"], + "static_init": ["__static_initialization"], + "rtti": ["__type_info", "__class_type_info"], + "web_server_lib": ["AsyncWebServer", "AsyncWebHandler", "WebServer"], + "async_tcp": ["AsyncClient", "AsyncServer"], + "json_lib": [ + "ArduinoJson", + "JsonDocument", + "JsonArray", + "JsonObject", + "deserialize", + "serialize", + ], + "http_lib": ["HTTP", "http_", "Request", "Response", "Uri", "WebSocket"], + "logging": ["log", "Log", "print", "Print", "diag_"], + "authentication": ["checkDigestAuthentication"], + "libgcc": ["libgcc"], + "esp_system": ["esp_", "ESP"], + "arduino": ["arduino"], + "nvs": ["nvs_", "_ZTVN3nvs", "nvs::"], + "filesystem": ["spiffs", "vfs"], + "libc": ["newlib"], +} + +# Patterns for categorizing ESPHome core symbols into subcategories +CORE_SUBCATEGORY_PATTERNS = { + "Component Framework": ["Component"], + "Application Core": ["Application"], + "Scheduler": ["Scheduler"], + "Component Iterator": ["ComponentIterator"], + "Helper Functions": ["Helpers", "helpers"], + "Preferences/Storage": ["Preferences", "ESPPreferences"], + "I/O Utilities": ["HighFrequencyLoopRequester"], + "String Utilities": ["str_"], + "Bit Utilities": ["reverse_bits"], + "Data Conversion": ["convert_"], + "Network Utilities": ["network", "IPAddress"], + "API Protocol": ["api::"], + "WiFi Manager": ["wifi::"], + "MQTT Client": ["mqtt::"], + "Logger": ["logger::"], + "OTA Updates": ["ota::"], + "Web Server": ["web_server::"], + "Time Management": ["time::"], + "Sensor Framework": ["sensor::"], + 
"Binary Sensor": ["binary_sensor::"], + "Switch Framework": ["switch_::"], + "Light Framework": ["light::"], + "Climate Framework": ["climate::"], + "Cover Framework": ["cover::"], +} diff --git a/esphome/analyze_memory/helpers.py b/esphome/analyze_memory/helpers.py new file mode 100644 index 0000000000..cb503b37c5 --- /dev/null +++ b/esphome/analyze_memory/helpers.py @@ -0,0 +1,121 @@ +"""Helper functions for memory analysis.""" + +from functools import cache +from pathlib import Path + +from .const import SECTION_MAPPING + +# Import namespace constant from parent module +# Note: This would create a circular import if done at module level, +# so we'll define it locally here as well +_NAMESPACE_ESPHOME = "esphome::" + + +# Get the list of actual ESPHome components by scanning the components directory +@cache +def get_esphome_components(): + """Get set of actual ESPHome components from the components directory.""" + # Find the components directory relative to this file + # Go up two levels from analyze_memory/helpers.py to esphome/ + current_dir = Path(__file__).parent.parent + components_dir = current_dir / "components" + + if not components_dir.exists() or not components_dir.is_dir(): + return frozenset() + + return frozenset( + item.name + for item in components_dir.iterdir() + if item.is_dir() + and not item.name.startswith(".") + and not item.name.startswith("__") + ) + + +@cache +def get_component_class_patterns(component_name: str) -> list[str]: + """Generate component class name patterns for symbol matching. + + Args: + component_name: The component name (e.g., "ota", "wifi", "api") + + Returns: + List of pattern strings to match against demangled symbols + """ + component_upper = component_name.upper() + component_camel = component_name.replace("_", "").title() + return [ + f"{_NAMESPACE_ESPHOME}{component_upper}Component", # e.g., esphome::OTAComponent + f"{_NAMESPACE_ESPHOME}ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent + f"{_NAMESPACE_ESPHOME}{component_camel}Component", # e.g., esphome::OtaComponent + f"{_NAMESPACE_ESPHOME}ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent + ] + + +def map_section_name(raw_section: str) -> str | None: + """Map raw section name to standard section. + + Args: + raw_section: Raw section name from ELF file (e.g., ".iram0.text", ".rodata.str1.1") + + Returns: + Standard section name (".text", ".rodata", ".data", ".bss") or None + """ + for standard_section, patterns in SECTION_MAPPING.items(): + if any(pattern in raw_section for pattern in patterns): + return standard_section + return None + + +def parse_symbol_line(line: str) -> tuple[str, str, int, str] | None: + """Parse a single symbol line from objdump output. + + Args: + line: Line from objdump -t output + + Returns: + Tuple of (section, name, size, address) or None if not a valid symbol. 
+ Format: address l/g w/d F/O section size name + Example: 40084870 l F .iram0.text 00000000 _xt_user_exc + """ + parts = line.split() + if len(parts) < 5: + return None + + try: + # Validate and extract address + address = parts[0] + int(address, 16) + except ValueError: + return None + + # Look for F (function) or O (object) flag + if "F" not in parts and "O" not in parts: + return None + + # Find section, size, and name + for i, part in enumerate(parts): + if not part.startswith("."): + continue + + section = map_section_name(part) + if not section: + break + + # Need at least size field after section + if i + 1 >= len(parts): + break + + try: + size = int(parts[i + 1], 16) + except ValueError: + break + + # Need symbol name and non-zero size + if i + 2 >= len(parts) or size == 0: + break + + name = " ".join(parts[i + 2 :]) + return (section, name, size, address) + + return None diff --git a/esphome/components/adalight/adalight_light_effect.cpp b/esphome/components/adalight/adalight_light_effect.cpp index 35e98d7360..4cf639a01f 100644 --- a/esphome/components/adalight/adalight_light_effect.cpp +++ b/esphome/components/adalight/adalight_light_effect.cpp @@ -9,7 +9,7 @@ static const char *const TAG = "adalight_light_effect"; static const uint32_t ADALIGHT_ACK_INTERVAL = 1000; static const uint32_t ADALIGHT_RECEIVE_TIMEOUT = 1000; -AdalightLightEffect::AdalightLightEffect(const std::string &name) : AddressableLightEffect(name) {} +AdalightLightEffect::AdalightLightEffect(const char *name) : AddressableLightEffect(name) {} void AdalightLightEffect::start() { AddressableLightEffect::start(); diff --git a/esphome/components/adalight/adalight_light_effect.h b/esphome/components/adalight/adalight_light_effect.h index 72faf44269..bb7319c99c 100644 --- a/esphome/components/adalight/adalight_light_effect.h +++ b/esphome/components/adalight/adalight_light_effect.h @@ -11,7 +11,7 @@ namespace adalight { class AdalightLightEffect : public light::AddressableLightEffect, public uart::UARTDevice { public: - AdalightLightEffect(const std::string &name); + AdalightLightEffect(const char *name); void start() override; void stop() override; diff --git a/esphome/components/anova/anova.h b/esphome/components/anova/anova.h index 560d96baa7..2e43ebfb98 100644 --- a/esphome/components/anova/anova.h +++ b/esphome/components/anova/anova.h @@ -28,7 +28,7 @@ class Anova : public climate::Climate, public esphome::ble_client::BLEClientNode void dump_config() override; climate::ClimateTraits traits() override { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); traits.set_supported_modes({climate::CLIMATE_MODE_OFF, climate::ClimateMode::CLIMATE_MODE_HEAT}); traits.set_visual_min_temperature(25.0); traits.set_visual_max_temperature(100.0); diff --git a/esphome/components/api/__init__.py b/esphome/components/api/__init__.py index e8dacf51bc..e91e922204 100644 --- a/esphome/components/api/__init__.py +++ b/esphome/components/api/__init__.py @@ -155,6 +155,17 @@ def _validate_api_config(config: ConfigType) -> ConfigType: return config +def _consume_api_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for API component.""" + from esphome.components import socket + + # API needs 1 listening socket + typically 3 concurrent client connections + # (not max_connections, which is the upper limit rarely reached) + sockets_needed = 1 + 3 + socket.consume_sockets(sockets_needed, "api")(config) + 
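+    # consume_sockets(count, name) returns a validator-style callable; invoking it with the
+    # config registers the API component's socket requirement under the "api" name with the
+    # socket component (the returned config is not used here).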
return config + + CONFIG_SCHEMA = cv.All( cv.Schema( { @@ -222,6 +233,7 @@ CONFIG_SCHEMA = cv.All( ).extend(cv.COMPONENT_SCHEMA), cv.rename_key(CONF_SERVICES, CONF_ACTIONS), _validate_api_config, + _consume_api_sockets, ) diff --git a/esphome/components/api/api.proto b/esphome/components/api/api.proto index 753adc3592..a4c2557ffe 100644 --- a/esphome/components/api/api.proto +++ b/esphome/components/api/api.proto @@ -425,7 +425,7 @@ message ListEntitiesFanResponse { bool disabled_by_default = 9; string icon = 10 [(field_ifdef) = "USE_ENTITY_ICON"]; EntityCategory entity_category = 11; - repeated string supported_preset_modes = 12 [(container_pointer) = "std::set"]; + repeated string supported_preset_modes = 12 [(container_pointer) = "std::vector"]; uint32 device_id = 13 [(field_ifdef) = "USE_DEVICES"]; } // Deprecated in API version 1.6 - only used in deprecated fields @@ -506,7 +506,7 @@ message ListEntitiesLightResponse { string name = 3; reserved 4; // Deprecated: was string unique_id - repeated ColorMode supported_color_modes = 12 [(container_pointer) = "std::set"]; + repeated ColorMode supported_color_modes = 12 [(container_pointer_no_template) = "light::ColorModeMask"]; // next four supports_* are for legacy clients, newer clients should use color modes // Deprecated in API version 1.6 bool legacy_supports_brightness = 5 [deprecated=true]; diff --git a/esphome/components/api/api_connection.cpp b/esphome/components/api/api_connection.cpp index 7dfefedd54..7c135946f8 100644 --- a/esphome/components/api/api_connection.cpp +++ b/esphome/components/api/api_connection.cpp @@ -453,7 +453,6 @@ uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection * bool is_single) { auto *light = static_cast(entity); LightStateResponse resp; - auto traits = light->get_traits(); auto values = light->remote_values; auto color_mode = values.get_color_mode(); resp.state = values.is_on(); @@ -477,7 +476,8 @@ uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *c auto *light = static_cast(entity); ListEntitiesLightResponse msg; auto traits = light->get_traits(); - msg.supported_color_modes = &traits.get_supported_color_modes_for_api_(); + // Pass pointer to ColorModeMask so the iterator can encode actual ColorMode enum values + msg.supported_color_modes = &traits.get_supported_color_modes(); if (traits.supports_color_capability(light::ColorCapability::COLOR_TEMPERATURE) || traits.supports_color_capability(light::ColorCapability::COLD_WARM_WHITE)) { msg.min_mireds = traits.get_min_mireds(); @@ -1572,7 +1572,13 @@ bool APIConnection::send_noise_encryption_set_key_response(const NoiseEncryption resp.success = false; psk_t psk{}; - if (base64_decode(msg.key, psk.data(), msg.key.size()) != psk.size()) { + if (msg.key.empty()) { + if (this->parent_->clear_noise_psk(true)) { + resp.success = true; + } else { + ESP_LOGW(TAG, "Failed to clear encryption key"); + } + } else if (base64_decode(msg.key, psk.data(), msg.key.size()) != psk.size()) { ESP_LOGW(TAG, "Invalid encryption key length"); } else if (!this->parent_->save_noise_psk(psk, true)) { ESP_LOGW(TAG, "Failed to save encryption key"); diff --git a/esphome/components/api/api_options.proto b/esphome/components/api/api_options.proto index ead8ac0bbc..6b33408e2f 100644 --- a/esphome/components/api/api_options.proto +++ b/esphome/components/api/api_options.proto @@ -70,4 +70,14 @@ extend google.protobuf.FieldOptions { // init(size) before adding elements. 
This eliminates std::vector template overhead
  // and is ideal when the exact size is known before populating the array.
  optional bool fixed_vector = 50013 [default=false];
+
+  // container_pointer_no_template: Use a non-template container type for repeated fields
+  // Similar to container_pointer, but for containers that don't take template parameters.
+  // The container type is used as-is without appending element type.
+  // The container must have:
+  // - begin() and end() methods returning iterators
+  // - empty() method
+  // Example: [(container_pointer_no_template) = "light::ColorModeMask"]
+  // generates: const light::ColorModeMask *supported_color_modes{};
+  optional string container_pointer_no_template = 50014;
 }
diff --git a/esphome/components/api/api_pb2.h b/esphome/components/api/api_pb2.h
index 5603204801..e71ad2c64e 100644
--- a/esphome/components/api/api_pb2.h
+++ b/esphome/components/api/api_pb2.h
@@ -725,7 +725,7 @@ class ListEntitiesFanResponse final : public InfoResponseProtoMessage {
   bool supports_speed{false};
   bool supports_direction{false};
   int32_t supported_speed_count{0};
-  const std::set<std::string> *supported_preset_modes{};
+  const std::vector<std::string> *supported_preset_modes{};
   void encode(ProtoWriteBuffer buffer) const override;
   void calculate_size(ProtoSize &size) const override;
 #ifdef HAS_PROTO_MESSAGE_DUMP
@@ -790,7 +790,7 @@ class ListEntitiesLightResponse final : public InfoResponseProtoMessage {
 #ifdef HAS_PROTO_MESSAGE_DUMP
   const char *message_name() const override { return "list_entities_light_response"; }
 #endif
-  const std::set<enums::ColorMode> *supported_color_modes{};
+  const light::ColorModeMask *supported_color_modes{};
   float min_mireds{0.0f};
   float max_mireds{0.0f};
   std::vector<std::string> effects{};
diff --git a/esphome/components/api/api_server.cpp b/esphome/components/api/api_server.cpp
index 778d9389ef..e618610a75 100644
--- a/esphome/components/api/api_server.cpp
+++ b/esphome/components/api/api_server.cpp
@@ -468,6 +468,31 @@ uint16_t APIServer::get_port() const { return this->port_; }
 void APIServer::set_reboot_timeout(uint32_t reboot_timeout) { this->reboot_timeout_ = reboot_timeout; }
 #ifdef USE_API_NOISE
+bool APIServer::update_noise_psk_(const SavedNoisePsk &new_psk, const LogString *save_log_msg,
+                                  const LogString *fail_log_msg, const psk_t &active_psk, bool make_active) {
+  if (!this->noise_pref_.save(&new_psk)) {
+    ESP_LOGW(TAG, "%s", LOG_STR_ARG(fail_log_msg));
+    return false;
+  }
+  // ensure it's written immediately
+  if (!global_preferences->sync()) {
+    ESP_LOGW(TAG, "Failed to sync preferences");
+    return false;
+  }
+  ESP_LOGD(TAG, "%s", LOG_STR_ARG(save_log_msg));
+  if (make_active) {
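+    // make_active: switch this server to the new PSK after a short delay, then send a
+    // DisconnectRequest to every connected client so they re-handshake with the updated key.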
this->set_timeout(100, [this, psk]() { - ESP_LOGW(TAG, "Disconnecting all clients to reset PSK"); - this->set_noise_psk(psk); - for (auto &c : this->clients_) { - DisconnectRequest req; - c->send_message(req, DisconnectRequest::MESSAGE_TYPE); - } - }); - } - return true; + return this->update_noise_psk_(new_saved_psk, LOG_STR("Noise PSK saved"), LOG_STR("Failed to save Noise PSK"), psk, + make_active); +#endif +} +bool APIServer::clear_noise_psk(bool make_active) { +#ifdef USE_API_NOISE_PSK_FROM_YAML + // When PSK is set from YAML, this function should never be called + // but if it is, reject the change + ESP_LOGW(TAG, "Key set in YAML"); + return false; +#else + SavedNoisePsk empty_psk{}; + psk_t empty{}; + return this->update_noise_psk_(empty_psk, LOG_STR("Noise PSK cleared"), LOG_STR("Failed to clear Noise PSK"), empty, + make_active); #endif } #endif diff --git a/esphome/components/api/api_server.h b/esphome/components/api/api_server.h index 5d038e5ddd..e0e23301d0 100644 --- a/esphome/components/api/api_server.h +++ b/esphome/components/api/api_server.h @@ -53,6 +53,7 @@ class APIServer : public Component, public Controller { #ifdef USE_API_NOISE bool save_noise_psk(psk_t psk, bool make_active = true); + bool clear_noise_psk(bool make_active = true); void set_noise_psk(psk_t psk) { noise_ctx_->set_psk(psk); } std::shared_ptr get_noise_ctx() { return noise_ctx_; } #endif // USE_API_NOISE @@ -174,6 +175,10 @@ class APIServer : public Component, public Controller { protected: void schedule_reboot_timeout_(); +#ifdef USE_API_NOISE + bool update_noise_psk_(const SavedNoisePsk &new_psk, const LogString *save_log_msg, const LogString *fail_log_msg, + const psk_t &active_psk, bool make_active); +#endif // USE_API_NOISE // Pointers and pointer-like types first (4 bytes each) std::unique_ptr socket_ = nullptr; #ifdef USE_API_CLIENT_CONNECTED_TRIGGER diff --git a/esphome/components/bang_bang/bang_bang_climate.cpp b/esphome/components/bang_bang/bang_bang_climate.cpp index bb85b49238..f26377a38a 100644 --- a/esphome/components/bang_bang/bang_bang_climate.cpp +++ b/esphome/components/bang_bang/bang_bang_climate.cpp @@ -6,6 +6,9 @@ namespace bang_bang { static const char *const TAG = "bang_bang.climate"; +BangBangClimate::BangBangClimate() + : idle_trigger_(new Trigger<>()), cool_trigger_(new Trigger<>()), heat_trigger_(new Trigger<>()) {} + void BangBangClimate::setup() { this->sensor_->add_on_state_callback([this](float state) { this->current_temperature = state; @@ -31,53 +34,63 @@ void BangBangClimate::setup() { restore->to_call(this).perform(); } else { // restore from defaults, change_away handles those for us - if (supports_cool_ && supports_heat_) { + if (this->supports_cool_ && this->supports_heat_) { this->mode = climate::CLIMATE_MODE_HEAT_COOL; - } else if (supports_cool_) { + } else if (this->supports_cool_) { this->mode = climate::CLIMATE_MODE_COOL; - } else if (supports_heat_) { + } else if (this->supports_heat_) { this->mode = climate::CLIMATE_MODE_HEAT; } this->change_away_(false); } } + void BangBangClimate::control(const climate::ClimateCall &call) { - if (call.get_mode().has_value()) + if (call.get_mode().has_value()) { this->mode = *call.get_mode(); - if (call.get_target_temperature_low().has_value()) + } + if (call.get_target_temperature_low().has_value()) { this->target_temperature_low = *call.get_target_temperature_low(); - if (call.get_target_temperature_high().has_value()) + } + if (call.get_target_temperature_high().has_value()) { this->target_temperature_high = 
*call.get_target_temperature_high(); - if (call.get_preset().has_value()) + } + if (call.get_preset().has_value()) { this->change_away_(*call.get_preset() == climate::CLIMATE_PRESET_AWAY); + } this->compute_state_(); this->publish_state(); } + climate::ClimateTraits BangBangClimate::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(true); - if (this->humidity_sensor_ != nullptr) - traits.set_supports_current_humidity(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | + climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE | climate::CLIMATE_SUPPORTS_ACTION); + if (this->humidity_sensor_ != nullptr) { + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY); + } traits.set_supported_modes({ climate::CLIMATE_MODE_OFF, }); - if (supports_cool_) + if (this->supports_cool_) { traits.add_supported_mode(climate::CLIMATE_MODE_COOL); - if (supports_heat_) + } + if (this->supports_heat_) { traits.add_supported_mode(climate::CLIMATE_MODE_HEAT); - if (supports_cool_ && supports_heat_) + } + if (this->supports_cool_ && this->supports_heat_) { traits.add_supported_mode(climate::CLIMATE_MODE_HEAT_COOL); - traits.set_supports_two_point_target_temperature(true); - if (supports_away_) { + } + if (this->supports_away_) { traits.set_supported_presets({ climate::CLIMATE_PRESET_HOME, climate::CLIMATE_PRESET_AWAY, }); } - traits.set_supports_action(true); return traits; } + void BangBangClimate::compute_state_() { if (this->mode == climate::CLIMATE_MODE_OFF) { this->switch_to_action_(climate::CLIMATE_ACTION_OFF); @@ -122,6 +135,7 @@ void BangBangClimate::compute_state_() { this->switch_to_action_(target_action); } + void BangBangClimate::switch_to_action_(climate::ClimateAction action) { if (action == this->action) { // already in target mode @@ -166,6 +180,7 @@ void BangBangClimate::switch_to_action_(climate::ClimateAction action) { this->prev_trigger_ = trig; this->publish_state(); } + void BangBangClimate::change_away_(bool away) { if (!away) { this->target_temperature_low = this->normal_config_.default_temperature_low; @@ -176,22 +191,26 @@ void BangBangClimate::change_away_(bool away) { } this->preset = away ? 
climate::CLIMATE_PRESET_AWAY : climate::CLIMATE_PRESET_HOME; } + void BangBangClimate::set_normal_config(const BangBangClimateTargetTempConfig &normal_config) { this->normal_config_ = normal_config; } + void BangBangClimate::set_away_config(const BangBangClimateTargetTempConfig &away_config) { this->supports_away_ = true; this->away_config_ = away_config; } -BangBangClimate::BangBangClimate() - : idle_trigger_(new Trigger<>()), cool_trigger_(new Trigger<>()), heat_trigger_(new Trigger<>()) {} + void BangBangClimate::set_sensor(sensor::Sensor *sensor) { this->sensor_ = sensor; } void BangBangClimate::set_humidity_sensor(sensor::Sensor *humidity_sensor) { this->humidity_sensor_ = humidity_sensor; } + Trigger<> *BangBangClimate::get_idle_trigger() const { return this->idle_trigger_; } Trigger<> *BangBangClimate::get_cool_trigger() const { return this->cool_trigger_; } -void BangBangClimate::set_supports_cool(bool supports_cool) { this->supports_cool_ = supports_cool; } Trigger<> *BangBangClimate::get_heat_trigger() const { return this->heat_trigger_; } + +void BangBangClimate::set_supports_cool(bool supports_cool) { this->supports_cool_ = supports_cool; } void BangBangClimate::set_supports_heat(bool supports_heat) { this->supports_heat_ = supports_heat; } + void BangBangClimate::dump_config() { LOG_CLIMATE("", "Bang Bang Climate", this); ESP_LOGCONFIG(TAG, diff --git a/esphome/components/bang_bang/bang_bang_climate.h b/esphome/components/bang_bang/bang_bang_climate.h index 96368af34c..2e7da93a07 100644 --- a/esphome/components/bang_bang/bang_bang_climate.h +++ b/esphome/components/bang_bang/bang_bang_climate.h @@ -25,14 +25,15 @@ class BangBangClimate : public climate::Climate, public Component { void set_sensor(sensor::Sensor *sensor); void set_humidity_sensor(sensor::Sensor *humidity_sensor); - Trigger<> *get_idle_trigger() const; - Trigger<> *get_cool_trigger() const; void set_supports_cool(bool supports_cool); - Trigger<> *get_heat_trigger() const; void set_supports_heat(bool supports_heat); void set_normal_config(const BangBangClimateTargetTempConfig &normal_config); void set_away_config(const BangBangClimateTargetTempConfig &away_config); + Trigger<> *get_idle_trigger() const; + Trigger<> *get_cool_trigger() const; + Trigger<> *get_heat_trigger() const; + protected: /// Override control to change settings of the climate device. void control(const climate::ClimateCall &call) override; @@ -56,16 +57,10 @@ class BangBangClimate : public climate::Climate, public Component { * * In idle mode, the controller is assumed to have both heating and cooling disabled. */ - Trigger<> *idle_trigger_; + Trigger<> *idle_trigger_{nullptr}; /** The trigger to call when the controller should switch to cooling mode. */ - Trigger<> *cool_trigger_; - /** Whether the controller supports cooling. - * - * A false value for this attribute means that the controller has no cooling action - * (for example a thermostat, where only heating and not-heating is possible). - */ - bool supports_cool_{false}; + Trigger<> *cool_trigger_{nullptr}; /** The trigger to call when the controller should switch to heating mode. * * A null value for this attribute means that the controller has no heating action @@ -73,15 +68,23 @@ class BangBangClimate : public climate::Climate, public Component { * (blinds open) is possible. */ Trigger<> *heat_trigger_{nullptr}; - bool supports_heat_{false}; /** A reference to the trigger that was previously active. * * This is so that the previous trigger can be stopped before enabling a new one. 
*/ Trigger<> *prev_trigger_{nullptr}; - BangBangClimateTargetTempConfig normal_config_{}; + /** Whether the controller supports cooling/heating + * + * A false value for this attribute means that the controller has no respective action + * (for example a thermostat, where only heating and not-heating is possible). + */ + bool supports_cool_{false}; + bool supports_heat_{false}; + bool supports_away_{false}; + + BangBangClimateTargetTempConfig normal_config_{}; BangBangClimateTargetTempConfig away_config_{}; }; diff --git a/esphome/components/bedjet/climate/bedjet_climate.h b/esphome/components/bedjet/climate/bedjet_climate.h index 7eaa735a3f..963f2e585a 100644 --- a/esphome/components/bedjet/climate/bedjet_climate.h +++ b/esphome/components/bedjet/climate/bedjet_climate.h @@ -33,8 +33,7 @@ class BedJetClimate : public climate::Climate, public BedJetClient, public Polli climate::ClimateTraits traits() override { auto traits = climate::ClimateTraits(); - traits.set_supports_action(true); - traits.set_supports_current_temperature(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION | climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); traits.set_supported_modes({ climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT, diff --git a/esphome/components/binary_sensor/__init__.py b/esphome/components/binary_sensor/__init__.py index 6aa97d6e05..26e784a0b8 100644 --- a/esphome/components/binary_sensor/__init__.py +++ b/esphome/components/binary_sensor/__init__.py @@ -264,20 +264,31 @@ async def delayed_off_filter_to_code(config, filter_id): ), ) async def autorepeat_filter_to_code(config, filter_id): - timings = [] if len(config) > 0: - timings.extend( - (conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON]) - for conf in config - ) - else: - timings.append( - ( - cv.time_period_str_unit(DEFAULT_DELAY).total_milliseconds, - cv.time_period_str_unit(DEFAULT_TIME_OFF).total_milliseconds, - cv.time_period_str_unit(DEFAULT_TIME_ON).total_milliseconds, + timings = [ + cg.StructInitializer( + cg.MockObj("AutorepeatFilterTiming", "esphome::binary_sensor::"), + ("delay", conf[CONF_DELAY]), + ("time_off", conf[CONF_TIME_OFF]), + ("time_on", conf[CONF_TIME_ON]), ) - ) + for conf in config + ] + else: + timings = [ + cg.StructInitializer( + cg.MockObj("AutorepeatFilterTiming", "esphome::binary_sensor::"), + ("delay", cv.time_period_str_unit(DEFAULT_DELAY).total_milliseconds), + ( + "time_off", + cv.time_period_str_unit(DEFAULT_TIME_OFF).total_milliseconds, + ), + ( + "time_on", + cv.time_period_str_unit(DEFAULT_TIME_ON).total_milliseconds, + ), + ) + ] var = cg.new_Pvariable(filter_id, timings) await cg.register_component(var, {}) return var diff --git a/esphome/components/binary_sensor/automation.h b/esphome/components/binary_sensor/automation.h index b46436dc41..0bc7b9acb3 100644 --- a/esphome/components/binary_sensor/automation.h +++ b/esphome/components/binary_sensor/automation.h @@ -2,11 +2,11 @@ #include #include -#include #include "esphome/core/component.h" #include "esphome/core/automation.h" #include "esphome/core/hal.h" +#include "esphome/core/helpers.h" #include "esphome/components/binary_sensor/binary_sensor.h" namespace esphome { @@ -92,8 +92,8 @@ class DoubleClickTrigger : public Trigger<> { class MultiClickTrigger : public Trigger<>, public Component { public: - explicit MultiClickTrigger(BinarySensor *parent, std::vector timing) - : parent_(parent), timing_(std::move(timing)) {} + explicit MultiClickTrigger(BinarySensor *parent, std::initializer_list timing) + : parent_(parent), 
timing_(timing) {}
 
   void setup() override {
     this->last_state_ = this->parent_->get_state_default(false);
@@ -115,7 +115,7 @@ class MultiClickTrigger : public Trigger<>, public Component {
   void trigger_();
 
   BinarySensor *parent_;
-  std::vector<MultiClickTriggerEvent> timing_;
+  FixedVector<MultiClickTriggerEvent> timing_;
   uint32_t invalid_cooldown_{1000};
   optional<size_t> at_index_{};
   bool last_state_{false};
diff --git a/esphome/components/binary_sensor/binary_sensor.cpp b/esphome/components/binary_sensor/binary_sensor.cpp
index 39319d3c1c..33b3de6d72 100644
--- a/esphome/components/binary_sensor/binary_sensor.cpp
+++ b/esphome/components/binary_sensor/binary_sensor.cpp
@@ -51,7 +51,7 @@ void BinarySensor::add_filter(Filter *filter) {
     last_filter->next_ = filter;
   }
 }
-void BinarySensor::add_filters(const std::vector<Filter *> &filters) {
+void BinarySensor::add_filters(std::initializer_list<Filter *> filters) {
   for (Filter *filter : filters) {
     this->add_filter(filter);
   }
diff --git a/esphome/components/binary_sensor/binary_sensor.h b/esphome/components/binary_sensor/binary_sensor.h
index 2bd17d97c9..c1661d710f 100644
--- a/esphome/components/binary_sensor/binary_sensor.h
+++ b/esphome/components/binary_sensor/binary_sensor.h
@@ -4,7 +4,7 @@
 #include "esphome/core/helpers.h"
 #include "esphome/components/binary_sensor/filter.h"
 
-#include <vector>
+#include <initializer_list>
 
 namespace esphome {
@@ -48,7 +48,7 @@ class BinarySensor : public StatefulEntityBase, public EntityBase_DeviceCl
   void publish_initial_state(bool new_state);
 
   void add_filter(Filter *filter);
-  void add_filters(const std::vector<Filter *> &filters);
+  void add_filters(std::initializer_list<Filter *> filters);
 
   // ========== INTERNAL METHODS ==========
   // (In most use cases you won't need these)
diff --git a/esphome/components/binary_sensor/filter.cpp b/esphome/components/binary_sensor/filter.cpp
index 3567e9c72b..8f31cf6fc2 100644
--- a/esphome/components/binary_sensor/filter.cpp
+++ b/esphome/components/binary_sensor/filter.cpp
@@ -1,7 +1,6 @@
 #include "filter.h"
 #include "binary_sensor.h"
-#include
 
 namespace esphome {
@@ -68,7 +67,7 @@ float DelayedOffFilter::get_setup_priority() const { return setup_priority::HARD
 
 optional<bool> InvertFilter::new_value(bool value) { return !value; }
 
-AutorepeatFilter::AutorepeatFilter(std::vector<AutorepeatFilterTiming> timings) : timings_(std::move(timings)) {}
+AutorepeatFilter::AutorepeatFilter(std::initializer_list<AutorepeatFilterTiming> timings) : timings_(timings) {}
 
 optional<bool> AutorepeatFilter::new_value(bool value) {
   if (value) {
diff --git a/esphome/components/binary_sensor/filter.h b/esphome/components/binary_sensor/filter.h
index 16f44aa5fe..a7eb080feb 100644
--- a/esphome/components/binary_sensor/filter.h
+++ b/esphome/components/binary_sensor/filter.h
@@ -4,8 +4,6 @@
 #include "esphome/core/component.h"
 #include "esphome/core/helpers.h"
 
-#include <vector>
-
 namespace esphome {
 
 namespace binary_sensor {
@@ -82,11 +80,6 @@ class InvertFilter : public Filter {
 };
 
 struct AutorepeatFilterTiming {
-  AutorepeatFilterTiming(uint32_t delay, uint32_t off, uint32_t on) {
-    this->delay = delay;
-    this->time_off = off;
-    this->time_on = on;
-  }
   uint32_t delay;
   uint32_t time_off;
   uint32_t time_on;
@@ -94,7 +87,7 @@ struct AutorepeatFilterTiming {
 
 class AutorepeatFilter : public Filter, public Component {
  public:
-  explicit AutorepeatFilter(std::vector<AutorepeatFilterTiming> timings);
+  explicit AutorepeatFilter(std::initializer_list<AutorepeatFilterTiming> timings);
 
   optional<bool> new_value(bool value) override;
 
@@ -104,7 +97,7 @@ class AutorepeatFilter : public Filter, public Component {
   void next_timing_();
   void next_value_(bool val);
 
-  std::vector<AutorepeatFilterTiming> timings_;
+  FixedVector<AutorepeatFilterTiming> timings_;
   uint8_t active_timing_{0};
 };
 
diff --git
a/esphome/components/ble_nus/__init__.py b/esphome/components/ble_nus/__init__.py new file mode 100644 index 0000000000..9570005902 --- /dev/null +++ b/esphome/components/ble_nus/__init__.py @@ -0,0 +1,29 @@ +import esphome.codegen as cg +from esphome.components.zephyr import zephyr_add_prj_conf +import esphome.config_validation as cv +from esphome.const import CONF_ID, CONF_LOGS, CONF_TYPE + +AUTO_LOAD = ["zephyr_ble_server"] +CODEOWNERS = ["@tomaszduda23"] + +ble_nus_ns = cg.esphome_ns.namespace("ble_nus") +BLENUS = ble_nus_ns.class_("BLENUS", cg.Component) + +CONFIG_SCHEMA = cv.All( + cv.Schema( + { + cv.GenerateID(): cv.declare_id(BLENUS), + cv.Optional(CONF_TYPE, default=CONF_LOGS): cv.one_of( + *[CONF_LOGS], lower=True + ), + } + ).extend(cv.COMPONENT_SCHEMA), + cv.only_with_framework("zephyr"), +) + + +async def to_code(config): + var = cg.new_Pvariable(config[CONF_ID]) + zephyr_add_prj_conf("BT_NUS", True) + cg.add(var.set_expose_log(config[CONF_TYPE] == CONF_LOGS)) + await cg.register_component(var, config) diff --git a/esphome/components/ble_nus/ble_nus.cpp b/esphome/components/ble_nus/ble_nus.cpp new file mode 100644 index 0000000000..9c4d0a3938 --- /dev/null +++ b/esphome/components/ble_nus/ble_nus.cpp @@ -0,0 +1,157 @@ +#ifdef USE_ZEPHYR +#include "ble_nus.h" +#include +#include +#include "esphome/core/log.h" +#ifdef USE_LOGGER +#include "esphome/components/logger/logger.h" +#include "esphome/core/application.h" +#endif +#include + +namespace esphome::ble_nus { + +constexpr size_t BLE_TX_BUF_SIZE = 2048; + +// NOLINTBEGIN(cppcoreguidelines-avoid-non-const-global-variables) +BLENUS *global_ble_nus; +RING_BUF_DECLARE(global_ble_tx_ring_buf, BLE_TX_BUF_SIZE); +// NOLINTEND(cppcoreguidelines-avoid-non-const-global-variables) + +static const char *const TAG = "ble_nus"; + +size_t BLENUS::write_array(const uint8_t *data, size_t len) { + if (atomic_get(&this->tx_status_) == TX_DISABLED) { + return 0; + } + return ring_buf_put(&global_ble_tx_ring_buf, data, len); +} + +void BLENUS::connected(bt_conn *conn, uint8_t err) { + if (err == 0) { + global_ble_nus->conn_.store(bt_conn_ref(conn)); + } +} + +void BLENUS::disconnected(bt_conn *conn, uint8_t reason) { + if (global_ble_nus->conn_) { + bt_conn_unref(global_ble_nus->conn_.load()); + // Connection array is global static. + // Reference can be kept even if disconnected. 
+ } +} + +void BLENUS::tx_callback(bt_conn *conn) { + atomic_cas(&global_ble_nus->tx_status_, TX_BUSY, TX_ENABLED); + ESP_LOGVV(TAG, "Sent operation completed"); +} + +void BLENUS::send_enabled_callback(bt_nus_send_status status) { + switch (status) { + case BT_NUS_SEND_STATUS_ENABLED: + atomic_set(&global_ble_nus->tx_status_, TX_ENABLED); +#ifdef USE_LOGGER + if (global_ble_nus->expose_log_) { + App.schedule_dump_config(); + } +#endif + ESP_LOGD(TAG, "NUS notification has been enabled"); + break; + case BT_NUS_SEND_STATUS_DISABLED: + atomic_set(&global_ble_nus->tx_status_, TX_DISABLED); + ESP_LOGD(TAG, "NUS notification has been disabled"); + break; + } +} + +void BLENUS::rx_callback(bt_conn *conn, const uint8_t *const data, uint16_t len) { + ESP_LOGD(TAG, "Received %d bytes.", len); +} + +void BLENUS::setup() { + bt_nus_cb callbacks = { + .received = rx_callback, + .sent = tx_callback, + .send_enabled = send_enabled_callback, + }; + + bt_nus_init(&callbacks); + + static bt_conn_cb conn_callbacks = { + .connected = BLENUS::connected, + .disconnected = BLENUS::disconnected, + }; + + bt_conn_cb_register(&conn_callbacks); + + global_ble_nus = this; +#ifdef USE_LOGGER + if (logger::global_logger != nullptr && this->expose_log_) { + logger::global_logger->add_on_log_callback( + [this](int level, const char *tag, const char *message, size_t message_len) { + this->write_array(reinterpret_cast(message), message_len); + const char c = '\n'; + this->write_array(reinterpret_cast(&c), 1); + }); + } + +#endif +} + +void BLENUS::dump_config() { + ESP_LOGCONFIG(TAG, "ble nus:"); + ESP_LOGCONFIG(TAG, " log: %s", YESNO(this->expose_log_)); + uint32_t mtu = 0; + bt_conn *conn = this->conn_.load(); + if (conn) { + mtu = bt_nus_get_mtu(conn); + } + ESP_LOGCONFIG(TAG, " MTU: %u", mtu); +} + +void BLENUS::loop() { + if (ring_buf_is_empty(&global_ble_tx_ring_buf)) { + return; + } + + if (!atomic_cas(&this->tx_status_, TX_ENABLED, TX_BUSY)) { + if (atomic_get(&this->tx_status_) == TX_DISABLED) { + ring_buf_reset(&global_ble_tx_ring_buf); + } + return; + } + + bt_conn *conn = this->conn_.load(); + if (conn) { + conn = bt_conn_ref(conn); + } + + if (nullptr == conn) { + atomic_cas(&this->tx_status_, TX_BUSY, TX_ENABLED); + return; + } + + uint32_t req_len = bt_nus_get_mtu(conn); + + uint8_t *buf; + uint32_t size = ring_buf_get_claim(&global_ble_tx_ring_buf, &buf, req_len); + + int err, err2; + + err = bt_nus_send(conn, buf, size); + err2 = ring_buf_get_finish(&global_ble_tx_ring_buf, size); + if (err2) { + // It should no happen. 
+ ESP_LOGE(TAG, "Size %u exceeds valid bytes in the ring buffer (%d error)", size, err2); + } + if (err == 0) { + ESP_LOGVV(TAG, "Sent %d bytes", size); + } else { + ESP_LOGE(TAG, "Failed to send %d bytes (%d error)", size, err); + atomic_cas(&this->tx_status_, TX_BUSY, TX_ENABLED); + } + bt_conn_unref(conn); +} + +} // namespace esphome::ble_nus +#endif diff --git a/esphome/components/ble_nus/ble_nus.h b/esphome/components/ble_nus/ble_nus.h new file mode 100644 index 0000000000..e8cba32b4c --- /dev/null +++ b/esphome/components/ble_nus/ble_nus.h @@ -0,0 +1,37 @@ +#pragma once +#ifdef USE_ZEPHYR +#include "esphome/core/defines.h" +#include "esphome/core/component.h" +#include +#include + +namespace esphome::ble_nus { + +class BLENUS : public Component { + enum TxStatus { + TX_DISABLED, + TX_ENABLED, + TX_BUSY, + }; + + public: + void setup() override; + void dump_config() override; + void loop() override; + size_t write_array(const uint8_t *data, size_t len); + void set_expose_log(bool expose_log) { this->expose_log_ = expose_log; } + + protected: + static void send_enabled_callback(bt_nus_send_status status); + static void tx_callback(bt_conn *conn); + static void rx_callback(bt_conn *conn, const uint8_t *data, uint16_t len); + static void connected(bt_conn *conn, uint8_t err); + static void disconnected(bt_conn *conn, uint8_t reason); + + std::atomic conn_ = nullptr; + bool expose_log_ = false; + atomic_t tx_status_ = ATOMIC_INIT(TX_DISABLED); +}; + +} // namespace esphome::ble_nus +#endif diff --git a/esphome/components/bluetooth_proxy/bluetooth_proxy.cpp b/esphome/components/bluetooth_proxy/bluetooth_proxy.cpp index cd7261d5e5..34e0aa93a3 100644 --- a/esphome/components/bluetooth_proxy/bluetooth_proxy.cpp +++ b/esphome/components/bluetooth_proxy/bluetooth_proxy.cpp @@ -155,16 +155,12 @@ esp32_ble_tracker::AdvertisementParserType BluetoothProxy::get_advertisement_par BluetoothConnection *BluetoothProxy::get_connection_(uint64_t address, bool reserve) { for (uint8_t i = 0; i < this->connection_count_; i++) { auto *connection = this->connections_[i]; - if (connection->get_address() == address) + uint64_t conn_addr = connection->get_address(); + + if (conn_addr == address) return connection; - } - if (!reserve) - return nullptr; - - for (uint8_t i = 0; i < this->connection_count_; i++) { - auto *connection = this->connections_[i]; - if (connection->get_address() == 0) { + if (reserve && conn_addr == 0) { connection->send_service_ = INIT_SENDING_SERVICES; connection->set_address(address); // All connections must start at INIT @@ -175,7 +171,6 @@ BluetoothConnection *BluetoothProxy::get_connection_(uint64_t address, bool rese return connection; } } - return nullptr; } diff --git a/esphome/components/climate/climate.cpp b/esphome/components/climate/climate.cpp index f3c93ed44e..19fe241729 100644 --- a/esphome/components/climate/climate.cpp +++ b/esphome/components/climate/climate.cpp @@ -6,6 +6,42 @@ namespace climate { static const char *const TAG = "climate"; +// Memory-efficient lookup tables +struct StringToUint8 { + const char *str; + const uint8_t value; +}; + +constexpr StringToUint8 CLIMATE_MODES_BY_STR[] = { + {"OFF", CLIMATE_MODE_OFF}, + {"AUTO", CLIMATE_MODE_AUTO}, + {"COOL", CLIMATE_MODE_COOL}, + {"HEAT", CLIMATE_MODE_HEAT}, + {"FAN_ONLY", CLIMATE_MODE_FAN_ONLY}, + {"DRY", CLIMATE_MODE_DRY}, + {"HEAT_COOL", CLIMATE_MODE_HEAT_COOL}, +}; + +constexpr StringToUint8 CLIMATE_FAN_MODES_BY_STR[] = { + {"ON", CLIMATE_FAN_ON}, {"OFF", CLIMATE_FAN_OFF}, {"AUTO", CLIMATE_FAN_AUTO}, + {"LOW", 
CLIMATE_FAN_LOW}, {"MEDIUM", CLIMATE_FAN_MEDIUM}, {"HIGH", CLIMATE_FAN_HIGH}, + {"MIDDLE", CLIMATE_FAN_MIDDLE}, {"FOCUS", CLIMATE_FAN_FOCUS}, {"DIFFUSE", CLIMATE_FAN_DIFFUSE}, + {"QUIET", CLIMATE_FAN_QUIET}, +}; + +constexpr StringToUint8 CLIMATE_PRESETS_BY_STR[] = { + {"ECO", CLIMATE_PRESET_ECO}, {"AWAY", CLIMATE_PRESET_AWAY}, {"BOOST", CLIMATE_PRESET_BOOST}, + {"COMFORT", CLIMATE_PRESET_COMFORT}, {"HOME", CLIMATE_PRESET_HOME}, {"SLEEP", CLIMATE_PRESET_SLEEP}, + {"ACTIVITY", CLIMATE_PRESET_ACTIVITY}, {"NONE", CLIMATE_PRESET_NONE}, +}; + +constexpr StringToUint8 CLIMATE_SWING_MODES_BY_STR[] = { + {"OFF", CLIMATE_SWING_OFF}, + {"BOTH", CLIMATE_SWING_BOTH}, + {"VERTICAL", CLIMATE_SWING_VERTICAL}, + {"HORIZONTAL", CLIMATE_SWING_HORIZONTAL}, +}; + void ClimateCall::perform() { this->parent_->control_callback_.call(*this); ESP_LOGD(TAG, "'%s' - Setting", this->parent_->get_name().c_str()); @@ -50,47 +86,46 @@ void ClimateCall::perform() { } this->parent_->control(*this); } + void ClimateCall::validate_() { auto traits = this->parent_->get_traits(); if (this->mode_.has_value()) { auto mode = *this->mode_; if (!traits.supports_mode(mode)) { - ESP_LOGW(TAG, " Mode %s is not supported by this device!", LOG_STR_ARG(climate_mode_to_string(mode))); + ESP_LOGW(TAG, " Mode %s not supported", LOG_STR_ARG(climate_mode_to_string(mode))); this->mode_.reset(); } } if (this->custom_fan_mode_.has_value()) { auto custom_fan_mode = *this->custom_fan_mode_; if (!traits.supports_custom_fan_mode(custom_fan_mode)) { - ESP_LOGW(TAG, " Fan Mode %s is not supported by this device!", custom_fan_mode.c_str()); + ESP_LOGW(TAG, " Fan Mode %s not supported", custom_fan_mode.c_str()); this->custom_fan_mode_.reset(); } } else if (this->fan_mode_.has_value()) { auto fan_mode = *this->fan_mode_; if (!traits.supports_fan_mode(fan_mode)) { - ESP_LOGW(TAG, " Fan Mode %s is not supported by this device!", - LOG_STR_ARG(climate_fan_mode_to_string(fan_mode))); + ESP_LOGW(TAG, " Fan Mode %s not supported", LOG_STR_ARG(climate_fan_mode_to_string(fan_mode))); this->fan_mode_.reset(); } } if (this->custom_preset_.has_value()) { auto custom_preset = *this->custom_preset_; if (!traits.supports_custom_preset(custom_preset)) { - ESP_LOGW(TAG, " Preset %s is not supported by this device!", custom_preset.c_str()); + ESP_LOGW(TAG, " Preset %s not supported", custom_preset.c_str()); this->custom_preset_.reset(); } } else if (this->preset_.has_value()) { auto preset = *this->preset_; if (!traits.supports_preset(preset)) { - ESP_LOGW(TAG, " Preset %s is not supported by this device!", LOG_STR_ARG(climate_preset_to_string(preset))); + ESP_LOGW(TAG, " Preset %s not supported", LOG_STR_ARG(climate_preset_to_string(preset))); this->preset_.reset(); } } if (this->swing_mode_.has_value()) { auto swing_mode = *this->swing_mode_; if (!traits.supports_swing_mode(swing_mode)) { - ESP_LOGW(TAG, " Swing Mode %s is not supported by this device!", - LOG_STR_ARG(climate_swing_mode_to_string(swing_mode))); + ESP_LOGW(TAG, " Swing Mode %s not supported", LOG_STR_ARG(climate_swing_mode_to_string(swing_mode))); this->swing_mode_.reset(); } } @@ -99,159 +134,127 @@ void ClimateCall::validate_() { if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) { ESP_LOGW(TAG, " Cannot set target temperature for climate device " - "with two-point target temperature!"); + "with two-point target temperature"); this->target_temperature_.reset(); } else if (std::isnan(target)) { - ESP_LOGW(TAG, " Target 
temperature must not be NAN!"); + ESP_LOGW(TAG, " Target temperature must not be NAN"); this->target_temperature_.reset(); } } if (this->target_temperature_low_.has_value() || this->target_temperature_high_.has_value()) { if (!traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) { - ESP_LOGW(TAG, " Cannot set low/high target temperature for this device!"); + ESP_LOGW(TAG, " Cannot set low/high target temperature"); this->target_temperature_low_.reset(); this->target_temperature_high_.reset(); } } if (this->target_temperature_low_.has_value() && std::isnan(*this->target_temperature_low_)) { - ESP_LOGW(TAG, " Target temperature low must not be NAN!"); + ESP_LOGW(TAG, " Target temperature low must not be NAN"); this->target_temperature_low_.reset(); } if (this->target_temperature_high_.has_value() && std::isnan(*this->target_temperature_high_)) { - ESP_LOGW(TAG, " Target temperature low must not be NAN!"); + ESP_LOGW(TAG, " Target temperature high must not be NAN"); this->target_temperature_high_.reset(); } if (this->target_temperature_low_.has_value() && this->target_temperature_high_.has_value()) { float low = *this->target_temperature_low_; float high = *this->target_temperature_high_; if (low > high) { - ESP_LOGW(TAG, " Target temperature low %.2f must be smaller than target temperature high %.2f!", low, high); + ESP_LOGW(TAG, " Target temperature low %.2f must be less than target temperature high %.2f", low, high); this->target_temperature_low_.reset(); this->target_temperature_high_.reset(); } } } + ClimateCall &ClimateCall::set_mode(ClimateMode mode) { this->mode_ = mode; return *this; } + ClimateCall &ClimateCall::set_mode(const std::string &mode) { - if (str_equals_case_insensitive(mode, "OFF")) { - this->set_mode(CLIMATE_MODE_OFF); - } else if (str_equals_case_insensitive(mode, "AUTO")) { - this->set_mode(CLIMATE_MODE_AUTO); - } else if (str_equals_case_insensitive(mode, "COOL")) { - this->set_mode(CLIMATE_MODE_COOL); - } else if (str_equals_case_insensitive(mode, "HEAT")) { - this->set_mode(CLIMATE_MODE_HEAT); - } else if (str_equals_case_insensitive(mode, "FAN_ONLY")) { - this->set_mode(CLIMATE_MODE_FAN_ONLY); - } else if (str_equals_case_insensitive(mode, "DRY")) { - this->set_mode(CLIMATE_MODE_DRY); - } else if (str_equals_case_insensitive(mode, "HEAT_COOL")) { - this->set_mode(CLIMATE_MODE_HEAT_COOL); - } else { - ESP_LOGW(TAG, "'%s' - Unrecognized mode %s", this->parent_->get_name().c_str(), mode.c_str()); + for (const auto &mode_entry : CLIMATE_MODES_BY_STR) { + if (str_equals_case_insensitive(mode, mode_entry.str)) { + this->set_mode(static_cast(mode_entry.value)); + return *this; + } } + ESP_LOGW(TAG, "'%s' - Unrecognized mode %s", this->parent_->get_name().c_str(), mode.c_str()); return *this; } + ClimateCall &ClimateCall::set_fan_mode(ClimateFanMode fan_mode) { this->fan_mode_ = fan_mode; this->custom_fan_mode_.reset(); return *this; } + ClimateCall &ClimateCall::set_fan_mode(const std::string &fan_mode) { - if (str_equals_case_insensitive(fan_mode, "ON")) { - this->set_fan_mode(CLIMATE_FAN_ON); - } else if (str_equals_case_insensitive(fan_mode, "OFF")) { - this->set_fan_mode(CLIMATE_FAN_OFF); - } else if (str_equals_case_insensitive(fan_mode, "AUTO")) { - this->set_fan_mode(CLIMATE_FAN_AUTO); - } else if (str_equals_case_insensitive(fan_mode, "LOW")) { - this->set_fan_mode(CLIMATE_FAN_LOW); - } else if (str_equals_case_insensitive(fan_mode, "MEDIUM")) { - this->set_fan_mode(CLIMATE_FAN_MEDIUM); - } else 
if (str_equals_case_insensitive(fan_mode, "HIGH")) { - this->set_fan_mode(CLIMATE_FAN_HIGH); - } else if (str_equals_case_insensitive(fan_mode, "MIDDLE")) { - this->set_fan_mode(CLIMATE_FAN_MIDDLE); - } else if (str_equals_case_insensitive(fan_mode, "FOCUS")) { - this->set_fan_mode(CLIMATE_FAN_FOCUS); - } else if (str_equals_case_insensitive(fan_mode, "DIFFUSE")) { - this->set_fan_mode(CLIMATE_FAN_DIFFUSE); - } else if (str_equals_case_insensitive(fan_mode, "QUIET")) { - this->set_fan_mode(CLIMATE_FAN_QUIET); - } else { - if (this->parent_->get_traits().supports_custom_fan_mode(fan_mode)) { - this->custom_fan_mode_ = fan_mode; - this->fan_mode_.reset(); - } else { - ESP_LOGW(TAG, "'%s' - Unrecognized fan mode %s", this->parent_->get_name().c_str(), fan_mode.c_str()); + for (const auto &mode_entry : CLIMATE_FAN_MODES_BY_STR) { + if (str_equals_case_insensitive(fan_mode, mode_entry.str)) { + this->set_fan_mode(static_cast(mode_entry.value)); + return *this; } } + if (this->parent_->get_traits().supports_custom_fan_mode(fan_mode)) { + this->custom_fan_mode_ = fan_mode; + this->fan_mode_.reset(); + } else { + ESP_LOGW(TAG, "'%s' - Unrecognized fan mode %s", this->parent_->get_name().c_str(), fan_mode.c_str()); + } return *this; } + ClimateCall &ClimateCall::set_fan_mode(optional fan_mode) { if (fan_mode.has_value()) { this->set_fan_mode(fan_mode.value()); } return *this; } + ClimateCall &ClimateCall::set_preset(ClimatePreset preset) { this->preset_ = preset; this->custom_preset_.reset(); return *this; } + ClimateCall &ClimateCall::set_preset(const std::string &preset) { - if (str_equals_case_insensitive(preset, "ECO")) { - this->set_preset(CLIMATE_PRESET_ECO); - } else if (str_equals_case_insensitive(preset, "AWAY")) { - this->set_preset(CLIMATE_PRESET_AWAY); - } else if (str_equals_case_insensitive(preset, "BOOST")) { - this->set_preset(CLIMATE_PRESET_BOOST); - } else if (str_equals_case_insensitive(preset, "COMFORT")) { - this->set_preset(CLIMATE_PRESET_COMFORT); - } else if (str_equals_case_insensitive(preset, "HOME")) { - this->set_preset(CLIMATE_PRESET_HOME); - } else if (str_equals_case_insensitive(preset, "SLEEP")) { - this->set_preset(CLIMATE_PRESET_SLEEP); - } else if (str_equals_case_insensitive(preset, "ACTIVITY")) { - this->set_preset(CLIMATE_PRESET_ACTIVITY); - } else if (str_equals_case_insensitive(preset, "NONE")) { - this->set_preset(CLIMATE_PRESET_NONE); - } else { - if (this->parent_->get_traits().supports_custom_preset(preset)) { - this->custom_preset_ = preset; - this->preset_.reset(); - } else { - ESP_LOGW(TAG, "'%s' - Unrecognized preset %s", this->parent_->get_name().c_str(), preset.c_str()); + for (const auto &preset_entry : CLIMATE_PRESETS_BY_STR) { + if (str_equals_case_insensitive(preset, preset_entry.str)) { + this->set_preset(static_cast(preset_entry.value)); + return *this; } } + if (this->parent_->get_traits().supports_custom_preset(preset)) { + this->custom_preset_ = preset; + this->preset_.reset(); + } else { + ESP_LOGW(TAG, "'%s' - Unrecognized preset %s", this->parent_->get_name().c_str(), preset.c_str()); + } return *this; } + ClimateCall &ClimateCall::set_preset(optional preset) { if (preset.has_value()) { this->set_preset(preset.value()); } return *this; } + ClimateCall &ClimateCall::set_swing_mode(ClimateSwingMode swing_mode) { this->swing_mode_ = swing_mode; return *this; } + ClimateCall &ClimateCall::set_swing_mode(const std::string &swing_mode) { - if (str_equals_case_insensitive(swing_mode, "OFF")) { - this->set_swing_mode(CLIMATE_SWING_OFF); - } else 
if (str_equals_case_insensitive(swing_mode, "BOTH")) { - this->set_swing_mode(CLIMATE_SWING_BOTH); - } else if (str_equals_case_insensitive(swing_mode, "VERTICAL")) { - this->set_swing_mode(CLIMATE_SWING_VERTICAL); - } else if (str_equals_case_insensitive(swing_mode, "HORIZONTAL")) { - this->set_swing_mode(CLIMATE_SWING_HORIZONTAL); - } else { - ESP_LOGW(TAG, "'%s' - Unrecognized swing mode %s", this->parent_->get_name().c_str(), swing_mode.c_str()); + for (const auto &mode_entry : CLIMATE_SWING_MODES_BY_STR) { + if (str_equals_case_insensitive(swing_mode, mode_entry.str)) { + this->set_swing_mode(static_cast(mode_entry.value)); + return *this; + } } + ESP_LOGW(TAG, "'%s' - Unrecognized swing mode %s", this->parent_->get_name().c_str(), swing_mode.c_str()); return *this; } @@ -259,59 +262,71 @@ ClimateCall &ClimateCall::set_target_temperature(float target_temperature) { this->target_temperature_ = target_temperature; return *this; } + ClimateCall &ClimateCall::set_target_temperature_low(float target_temperature_low) { this->target_temperature_low_ = target_temperature_low; return *this; } + ClimateCall &ClimateCall::set_target_temperature_high(float target_temperature_high) { this->target_temperature_high_ = target_temperature_high; return *this; } + ClimateCall &ClimateCall::set_target_humidity(float target_humidity) { this->target_humidity_ = target_humidity; return *this; } -const optional &ClimateCall::get_mode() const { return this->mode_; } const optional &ClimateCall::get_target_temperature() const { return this->target_temperature_; } const optional &ClimateCall::get_target_temperature_low() const { return this->target_temperature_low_; } const optional &ClimateCall::get_target_temperature_high() const { return this->target_temperature_high_; } const optional &ClimateCall::get_target_humidity() const { return this->target_humidity_; } + +const optional &ClimateCall::get_mode() const { return this->mode_; } const optional &ClimateCall::get_fan_mode() const { return this->fan_mode_; } -const optional &ClimateCall::get_custom_fan_mode() const { return this->custom_fan_mode_; } -const optional &ClimateCall::get_preset() const { return this->preset_; } -const optional &ClimateCall::get_custom_preset() const { return this->custom_preset_; } const optional &ClimateCall::get_swing_mode() const { return this->swing_mode_; } +const optional &ClimateCall::get_preset() const { return this->preset_; } +const optional &ClimateCall::get_custom_fan_mode() const { return this->custom_fan_mode_; } +const optional &ClimateCall::get_custom_preset() const { return this->custom_preset_; } + ClimateCall &ClimateCall::set_target_temperature_high(optional target_temperature_high) { this->target_temperature_high_ = target_temperature_high; return *this; } + ClimateCall &ClimateCall::set_target_temperature_low(optional target_temperature_low) { this->target_temperature_low_ = target_temperature_low; return *this; } + ClimateCall &ClimateCall::set_target_temperature(optional target_temperature) { this->target_temperature_ = target_temperature; return *this; } + ClimateCall &ClimateCall::set_target_humidity(optional target_humidity) { this->target_humidity_ = target_humidity; return *this; } + ClimateCall &ClimateCall::set_mode(optional mode) { this->mode_ = mode; return *this; } + ClimateCall &ClimateCall::set_fan_mode(optional fan_mode) { this->fan_mode_ = fan_mode; this->custom_fan_mode_.reset(); return *this; } + ClimateCall &ClimateCall::set_preset(optional preset) { this->preset_ = preset; 
this->custom_preset_.reset(); return *this; } + ClimateCall &ClimateCall::set_swing_mode(optional swing_mode) { this->swing_mode_ = swing_mode; return *this; @@ -336,6 +351,7 @@ optional Climate::restore_state_() { return {}; return recovered; } + void Climate::save_state_() { #if (defined(USE_ESP_IDF) || (defined(USE_ESP8266) && USE_ARDUINO_VERSION_CODE >= VERSION_CODE(3, 0, 0))) && \ !defined(CLANG_TIDY) @@ -369,12 +385,14 @@ void Climate::save_state_() { if (!traits.get_supported_custom_fan_modes().empty() && custom_fan_mode.has_value()) { state.uses_custom_fan_mode = true; const auto &supported = traits.get_supported_custom_fan_modes(); - std::vector vec{supported.begin(), supported.end()}; - for (size_t i = 0; i < vec.size(); i++) { - if (vec[i] == custom_fan_mode) { + // std::set has consistent order (lexicographic for strings) + size_t i = 0; + for (const auto &mode : supported) { + if (mode == custom_fan_mode) { state.custom_fan_mode = i; break; } + i++; } } if (traits.get_supports_presets() && preset.has_value()) { @@ -384,12 +402,14 @@ void Climate::save_state_() { if (!traits.get_supported_custom_presets().empty() && custom_preset.has_value()) { state.uses_custom_preset = true; const auto &supported = traits.get_supported_custom_presets(); - std::vector vec{supported.begin(), supported.end()}; - for (size_t i = 0; i < vec.size(); i++) { - if (vec[i] == custom_preset) { + // std::set has consistent order (lexicographic for strings) + size_t i = 0; + for (const auto &preset : supported) { + if (preset == custom_preset) { state.custom_preset = i; break; } + i++; } } if (traits.get_supports_swing_modes()) { @@ -398,6 +418,7 @@ void Climate::save_state_() { this->rtc_.save(&state); } + void Climate::publish_state() { ESP_LOGD(TAG, "'%s' - Sending state:", this->name_.c_str()); auto traits = this->get_traits(); @@ -469,16 +490,20 @@ ClimateTraits Climate::get_traits() { void Climate::set_visual_min_temperature_override(float visual_min_temperature_override) { this->visual_min_temperature_override_ = visual_min_temperature_override; } + void Climate::set_visual_max_temperature_override(float visual_max_temperature_override) { this->visual_max_temperature_override_ = visual_max_temperature_override; } + void Climate::set_visual_temperature_step_override(float target, float current) { this->visual_target_temperature_step_override_ = target; this->visual_current_temperature_step_override_ = current; } + void Climate::set_visual_min_humidity_override(float visual_min_humidity_override) { this->visual_min_humidity_override_ = visual_min_humidity_override; } + void Climate::set_visual_max_humidity_override(float visual_max_humidity_override) { this->visual_max_humidity_override_ = visual_max_humidity_override; } @@ -499,17 +524,28 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) { if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) { call.set_target_humidity(this->target_humidity); } - if (traits.get_supports_fan_modes() || !traits.get_supported_custom_fan_modes().empty()) { + if (this->uses_custom_fan_mode) { + if (this->custom_fan_mode < traits.get_supported_custom_fan_modes().size()) { + call.fan_mode_.reset(); + call.custom_fan_mode_ = *std::next(traits.get_supported_custom_fan_modes().cbegin(), this->custom_fan_mode); + } + } else if (traits.supports_fan_mode(this->fan_mode)) { call.set_fan_mode(this->fan_mode); } - if (traits.get_supports_presets() || !traits.get_supported_custom_presets().empty()) { + if (this->uses_custom_preset) { + if 
(this->custom_preset < traits.get_supported_custom_presets().size()) { + call.preset_.reset(); + call.custom_preset_ = *std::next(traits.get_supported_custom_presets().cbegin(), this->custom_preset); + } + } else if (traits.supports_preset(this->preset)) { call.set_preset(this->preset); } - if (traits.get_supports_swing_modes()) { + if (traits.supports_swing_mode(this->swing_mode)) { call.set_swing_mode(this->swing_mode); } return call; } + void ClimateDeviceRestoreState::apply(Climate *climate) { auto traits = climate->get_traits(); climate->mode = this->mode; @@ -523,29 +559,25 @@ void ClimateDeviceRestoreState::apply(Climate *climate) { if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) { climate->target_humidity = this->target_humidity; } - if (traits.get_supports_fan_modes() && !this->uses_custom_fan_mode) { + if (this->uses_custom_fan_mode) { + if (this->custom_fan_mode < traits.get_supported_custom_fan_modes().size()) { + climate->fan_mode.reset(); + climate->custom_fan_mode = *std::next(traits.get_supported_custom_fan_modes().cbegin(), this->custom_fan_mode); + } + } else if (traits.supports_fan_mode(this->fan_mode)) { climate->fan_mode = this->fan_mode; + climate->custom_fan_mode.reset(); } - if (!traits.get_supported_custom_fan_modes().empty() && this->uses_custom_fan_mode) { - // std::set has consistent order (lexicographic for strings), so this is ok - const auto &modes = traits.get_supported_custom_fan_modes(); - std::vector modes_vec{modes.begin(), modes.end()}; - if (custom_fan_mode < modes_vec.size()) { - climate->custom_fan_mode = modes_vec[this->custom_fan_mode]; + if (this->uses_custom_preset) { + if (this->custom_preset < traits.get_supported_custom_presets().size()) { + climate->preset.reset(); + climate->custom_preset = *std::next(traits.get_supported_custom_presets().cbegin(), this->custom_preset); } - } - if (traits.get_supports_presets() && !this->uses_custom_preset) { + } else if (traits.supports_preset(this->preset)) { climate->preset = this->preset; + climate->custom_preset.reset(); } - if (!traits.get_supported_custom_presets().empty() && uses_custom_preset) { - // std::set has consistent order (lexicographic for strings), so this is ok - const auto &presets = traits.get_supported_custom_presets(); - std::vector presets_vec{presets.begin(), presets.end()}; - if (custom_preset < presets_vec.size()) { - climate->custom_preset = presets_vec[this->custom_preset]; - } - } - if (traits.get_supports_swing_modes()) { + if (traits.supports_swing_mode(this->swing_mode)) { climate->swing_mode = this->swing_mode; } climate->publish_state(); @@ -579,68 +611,68 @@ void Climate::dump_traits_(const char *tag) { auto traits = this->get_traits(); ESP_LOGCONFIG(tag, "ClimateTraits:"); ESP_LOGCONFIG(tag, - " [x] Visual settings:\n" - " - Min temperature: %.1f\n" - " - Max temperature: %.1f\n" - " - Temperature step:\n" - " Target: %.1f", + " Visual settings:\n" + " - Min temperature: %.1f\n" + " - Max temperature: %.1f\n" + " - Temperature step:\n" + " Target: %.1f", traits.get_visual_min_temperature(), traits.get_visual_max_temperature(), traits.get_visual_target_temperature_step()); if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) { - ESP_LOGCONFIG(tag, " Current: %.1f", traits.get_visual_current_temperature_step()); + ESP_LOGCONFIG(tag, " Current: %.1f", traits.get_visual_current_temperature_step()); } if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY | climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) { 
ESP_LOGCONFIG(tag, - " - Min humidity: %.0f\n" - " - Max humidity: %.0f", + " - Min humidity: %.0f\n" + " - Max humidity: %.0f", traits.get_visual_min_humidity(), traits.get_visual_max_humidity()); } if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE | CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) { - ESP_LOGCONFIG(tag, " [x] Supports two-point target temperature"); + ESP_LOGCONFIG(tag, " Supports two-point target temperature"); } if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) { - ESP_LOGCONFIG(tag, " [x] Supports current temperature"); + ESP_LOGCONFIG(tag, " Supports current temperature"); } if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) { - ESP_LOGCONFIG(tag, " [x] Supports target humidity"); + ESP_LOGCONFIG(tag, " Supports target humidity"); } if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) { - ESP_LOGCONFIG(tag, " [x] Supports current humidity"); + ESP_LOGCONFIG(tag, " Supports current humidity"); } if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) { - ESP_LOGCONFIG(tag, " [x] Supports action"); + ESP_LOGCONFIG(tag, " Supports action"); } if (!traits.get_supported_modes().empty()) { - ESP_LOGCONFIG(tag, " [x] Supported modes:"); + ESP_LOGCONFIG(tag, " Supported modes:"); for (ClimateMode m : traits.get_supported_modes()) - ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_mode_to_string(m))); + ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_mode_to_string(m))); } if (!traits.get_supported_fan_modes().empty()) { - ESP_LOGCONFIG(tag, " [x] Supported fan modes:"); + ESP_LOGCONFIG(tag, " Supported fan modes:"); for (ClimateFanMode m : traits.get_supported_fan_modes()) - ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_fan_mode_to_string(m))); + ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_fan_mode_to_string(m))); } if (!traits.get_supported_custom_fan_modes().empty()) { - ESP_LOGCONFIG(tag, " [x] Supported custom fan modes:"); + ESP_LOGCONFIG(tag, " Supported custom fan modes:"); for (const std::string &s : traits.get_supported_custom_fan_modes()) - ESP_LOGCONFIG(tag, " - %s", s.c_str()); + ESP_LOGCONFIG(tag, " - %s", s.c_str()); } if (!traits.get_supported_presets().empty()) { - ESP_LOGCONFIG(tag, " [x] Supported presets:"); + ESP_LOGCONFIG(tag, " Supported presets:"); for (ClimatePreset p : traits.get_supported_presets()) - ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_preset_to_string(p))); + ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_preset_to_string(p))); } if (!traits.get_supported_custom_presets().empty()) { - ESP_LOGCONFIG(tag, " [x] Supported custom presets:"); + ESP_LOGCONFIG(tag, " Supported custom presets:"); for (const std::string &s : traits.get_supported_custom_presets()) - ESP_LOGCONFIG(tag, " - %s", s.c_str()); + ESP_LOGCONFIG(tag, " - %s", s.c_str()); } if (!traits.get_supported_swing_modes().empty()) { - ESP_LOGCONFIG(tag, " [x] Supported swing modes:"); + ESP_LOGCONFIG(tag, " Supported swing modes:"); for (ClimateSwingMode m : traits.get_supported_swing_modes()) - ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_swing_mode_to_string(m))); + ESP_LOGCONFIG(tag, " - %s", LOG_STR_ARG(climate_swing_mode_to_string(m))); } } diff --git a/esphome/components/climate/climate.h b/esphome/components/climate/climate.h index b31a2eedf6..0c3e3ebe16 100644 --- a/esphome/components/climate/climate.h +++ b/esphome/components/climate/climate.h @@ -33,6 +33,7 @@ class Climate; class ClimateCall { public: explicit ClimateCall(Climate *parent) : parent_(parent) {} 
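For orientation, a minimal usage sketch of this call-builder API; my_climate is a placeholder, and make_call() is assumed to be the usual Climate helper that returns a ClimateCall:

auto call = my_climate->make_call();
call.set_mode("HEAT");               // string overloads resolve via the lookup tables in climate.cpp
call.set_target_temperature(21.5f);
call.perform();                      // validate_() clears unsupported values, then control() applies the call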
+ friend struct ClimateDeviceRestoreState; /// Set the mode of the climate device. ClimateCall &set_mode(ClimateMode mode); @@ -93,30 +94,31 @@ class ClimateCall { void perform(); - const optional &get_mode() const; const optional &get_target_temperature() const; const optional &get_target_temperature_low() const; const optional &get_target_temperature_high() const; const optional &get_target_humidity() const; + + const optional &get_mode() const; const optional &get_fan_mode() const; const optional &get_swing_mode() const; - const optional &get_custom_fan_mode() const; const optional &get_preset() const; + const optional &get_custom_fan_mode() const; const optional &get_custom_preset() const; protected: void validate_(); Climate *const parent_; - optional mode_; optional target_temperature_; optional target_temperature_low_; optional target_temperature_high_; optional target_humidity_; + optional mode_; optional fan_mode_; optional swing_mode_; - optional custom_fan_mode_; optional preset_; + optional custom_fan_mode_; optional custom_preset_; }; @@ -169,47 +171,6 @@ class Climate : public EntityBase { public: Climate() {} - /// The active mode of the climate device. - ClimateMode mode{CLIMATE_MODE_OFF}; - - /// The active state of the climate device. - ClimateAction action{CLIMATE_ACTION_OFF}; - - /// The current temperature of the climate device, as reported from the integration. - float current_temperature{NAN}; - - /// The current humidity of the climate device, as reported from the integration. - float current_humidity{NAN}; - - union { - /// The target temperature of the climate device. - float target_temperature; - struct { - /// The minimum target temperature of the climate device, for climate devices with split target temperature. - float target_temperature_low{NAN}; - /// The maximum target temperature of the climate device, for climate devices with split target temperature. - float target_temperature_high{NAN}; - }; - }; - - /// The target humidity of the climate device. - float target_humidity; - - /// The active fan mode of the climate device. - optional fan_mode; - - /// The active swing mode of the climate device. - ClimateSwingMode swing_mode; - - /// The active custom fan mode of the climate device. - optional custom_fan_mode; - - /// The active preset of the climate device. - optional preset; - - /// The active custom preset mode of the climate device. - optional custom_preset; - /** Add a callback for the climate device state, each time the state of the climate device is updated * (using publish_state), this callback will be called. * @@ -251,6 +212,47 @@ class Climate : public EntityBase { void set_visual_min_humidity_override(float visual_min_humidity_override); void set_visual_max_humidity_override(float visual_max_humidity_override); + /// The current temperature of the climate device, as reported from the integration. + float current_temperature{NAN}; + + /// The current humidity of the climate device, as reported from the integration. + float current_humidity{NAN}; + + union { + /// The target temperature of the climate device. + float target_temperature; + struct { + /// The minimum target temperature of the climate device, for climate devices with split target temperature. + float target_temperature_low{NAN}; + /// The maximum target temperature of the climate device, for climate devices with split target temperature. + float target_temperature_high{NAN}; + }; + }; + + /// The target humidity of the climate device. 
+ float target_humidity; + + /// The active fan mode of the climate device. + optional fan_mode; + + /// The active preset of the climate device. + optional preset; + + /// The active custom fan mode of the climate device. + optional custom_fan_mode; + + /// The active custom preset mode of the climate device. + optional custom_preset; + + /// The active mode of the climate device. + ClimateMode mode{CLIMATE_MODE_OFF}; + + /// The active state of the climate device. + ClimateAction action{CLIMATE_ACTION_OFF}; + + /// The active swing mode of the climate device. + ClimateSwingMode swing_mode{CLIMATE_SWING_OFF}; + protected: friend ClimateCall; diff --git a/esphome/components/climate/climate_traits.h b/esphome/components/climate/climate_traits.h index 50c1e79ad2..2962a147d7 100644 --- a/esphome/components/climate/climate_traits.h +++ b/esphome/components/climate/climate_traits.h @@ -1,8 +1,8 @@ #pragma once -#include "esphome/core/helpers.h" -#include "climate_mode.h" #include +#include "climate_mode.h" +#include "esphome/core/helpers.h" namespace esphome { @@ -109,44 +109,12 @@ class ClimateTraits { void set_supported_modes(std::set modes) { this->supported_modes_ = std::move(modes); } void add_supported_mode(ClimateMode mode) { this->supported_modes_.insert(mode); } - ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20") - void set_supports_auto_mode(bool supports_auto_mode) { set_mode_support_(CLIMATE_MODE_AUTO, supports_auto_mode); } - ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20") - void set_supports_cool_mode(bool supports_cool_mode) { set_mode_support_(CLIMATE_MODE_COOL, supports_cool_mode); } - ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20") - void set_supports_heat_mode(bool supports_heat_mode) { set_mode_support_(CLIMATE_MODE_HEAT, supports_heat_mode); } - ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20") - void set_supports_heat_cool_mode(bool supported) { set_mode_support_(CLIMATE_MODE_HEAT_COOL, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20") - void set_supports_fan_only_mode(bool supports_fan_only_mode) { - set_mode_support_(CLIMATE_MODE_FAN_ONLY, supports_fan_only_mode); - } - ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20") - void set_supports_dry_mode(bool supports_dry_mode) { set_mode_support_(CLIMATE_MODE_DRY, supports_dry_mode); } bool supports_mode(ClimateMode mode) const { return this->supported_modes_.count(mode); } const std::set &get_supported_modes() const { return this->supported_modes_; } void set_supported_fan_modes(std::set modes) { this->supported_fan_modes_ = std::move(modes); } void add_supported_fan_mode(ClimateFanMode mode) { this->supported_fan_modes_.insert(mode); } void add_supported_custom_fan_mode(const std::string &mode) { this->supported_custom_fan_modes_.insert(mode); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_on(bool supported) { set_fan_mode_support_(CLIMATE_FAN_ON, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_off(bool supported) { set_fan_mode_support_(CLIMATE_FAN_OFF, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_auto(bool supported) { 
set_fan_mode_support_(CLIMATE_FAN_AUTO, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_low(bool supported) { set_fan_mode_support_(CLIMATE_FAN_LOW, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_medium(bool supported) { set_fan_mode_support_(CLIMATE_FAN_MEDIUM, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_high(bool supported) { set_fan_mode_support_(CLIMATE_FAN_HIGH, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_middle(bool supported) { set_fan_mode_support_(CLIMATE_FAN_MIDDLE, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_focus(bool supported) { set_fan_mode_support_(CLIMATE_FAN_FOCUS, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_fan_modes() instead", "v1.20") - void set_supports_fan_mode_diffuse(bool supported) { set_fan_mode_support_(CLIMATE_FAN_DIFFUSE, supported); } bool supports_fan_mode(ClimateFanMode fan_mode) const { return this->supported_fan_modes_.count(fan_mode); } bool get_supports_fan_modes() const { return !this->supported_fan_modes_.empty() || !this->supported_custom_fan_modes_.empty(); @@ -178,16 +146,6 @@ class ClimateTraits { void set_supported_swing_modes(std::set modes) { this->supported_swing_modes_ = std::move(modes); } void add_supported_swing_mode(ClimateSwingMode mode) { this->supported_swing_modes_.insert(mode); } - ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20") - void set_supports_swing_mode_off(bool supported) { set_swing_mode_support_(CLIMATE_SWING_OFF, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20") - void set_supports_swing_mode_both(bool supported) { set_swing_mode_support_(CLIMATE_SWING_BOTH, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20") - void set_supports_swing_mode_vertical(bool supported) { set_swing_mode_support_(CLIMATE_SWING_VERTICAL, supported); } - ESPDEPRECATED("This method is deprecated, use set_supported_swing_modes() instead", "v1.20") - void set_supports_swing_mode_horizontal(bool supported) { - set_swing_mode_support_(CLIMATE_SWING_HORIZONTAL, supported); - } bool supports_swing_mode(ClimateSwingMode swing_mode) const { return this->supported_swing_modes_.count(swing_mode); } bool get_supports_swing_modes() const { return !this->supported_swing_modes_.empty(); } const std::set &get_supported_swing_modes() const { return this->supported_swing_modes_; } diff --git a/esphome/components/climate_ir/climate_ir.cpp b/esphome/components/climate_ir/climate_ir.cpp index dc8117f6ae..2b95792a6c 100644 --- a/esphome/components/climate_ir/climate_ir.cpp +++ b/esphome/components/climate_ir/climate_ir.cpp @@ -8,7 +8,10 @@ static const char *const TAG = "climate_ir"; climate::ClimateTraits ClimateIR::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(this->sensor_ != nullptr); + if (this->sensor_ != nullptr) { + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); + } + traits.set_supported_modes({climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT_COOL}); 
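The same flag-based pattern generalizes to any platform's traits() override; a minimal sketch, where MyClimate and its supported modes are hypothetical placeholders and only the API calls are taken from this change:

climate::ClimateTraits MyClimate::traits() {
  auto traits = climate::ClimateTraits();
  // one bitmask call replaces the removed boolean setters such as set_supports_action()
  traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_SUPPORTS_ACTION);
  traits.set_supported_modes({climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT});
  traits.set_visual_temperature_step(0.5);
  return traits;
}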
if (this->supports_cool_) traits.add_supported_mode(climate::CLIMATE_MODE_COOL); @@ -19,7 +22,6 @@ climate::ClimateTraits ClimateIR::traits() { if (this->supports_fan_only_) traits.add_supported_mode(climate::CLIMATE_MODE_FAN_ONLY); - traits.set_supports_two_point_target_temperature(false); traits.set_visual_min_temperature(this->minimum_temperature_); traits.set_visual_max_temperature(this->maximum_temperature_); traits.set_visual_temperature_step(this->temperature_step_); diff --git a/esphome/components/cover/cover.cpp b/esphome/components/cover/cover.cpp index 3378279371..654bb956a5 100644 --- a/esphome/components/cover/cover.cpp +++ b/esphome/components/cover/cover.cpp @@ -1,6 +1,6 @@ #include "cover.h" -#include "esphome/core/log.h" #include +#include "esphome/core/log.h" namespace esphome { namespace cover { @@ -144,21 +144,7 @@ CoverCall &CoverCall::set_stop(bool stop) { bool CoverCall::get_stop() const { return this->stop_; } CoverCall Cover::make_call() { return {this}; } -void Cover::open() { - auto call = this->make_call(); - call.set_command_open(); - call.perform(); -} -void Cover::close() { - auto call = this->make_call(); - call.set_command_close(); - call.perform(); -} -void Cover::stop() { - auto call = this->make_call(); - call.set_command_stop(); - call.perform(); -} + void Cover::add_on_state_callback(std::function &&f) { this->state_callback_.add(std::move(f)); } void Cover::publish_state(bool save) { this->position = clamp(this->position, 0.0f, 1.0f); diff --git a/esphome/components/cover/cover.h b/esphome/components/cover/cover.h index ada5953d57..d5db6cfb4f 100644 --- a/esphome/components/cover/cover.h +++ b/esphome/components/cover/cover.h @@ -4,6 +4,7 @@ #include "esphome/core/entity_base.h" #include "esphome/core/helpers.h" #include "esphome/core/preferences.h" + #include "cover_traits.h" namespace esphome { @@ -125,25 +126,6 @@ class Cover : public EntityBase, public EntityBase_DeviceClass { /// Construct a new cover call used to control the cover. CoverCall make_call(); - /** Open the cover. - * - * This is a legacy method and may be removed later, please use `.make_call()` instead. - */ - ESPDEPRECATED("open() is deprecated, use make_call().set_command_open().perform() instead.", "2021.9") - void open(); - /** Close the cover. - * - * This is a legacy method and may be removed later, please use `.make_call()` instead. - */ - ESPDEPRECATED("close() is deprecated, use make_call().set_command_close().perform() instead.", "2021.9") - void close(); - /** Stop the cover. - * - * This is a legacy method and may be removed later, please use `.make_call()` instead. 
- * As per solution from issue #2885 the call should include perform() - */ - ESPDEPRECATED("stop() is deprecated, use make_call().set_command_stop().perform() instead.", "2021.9") - void stop(); void add_on_state_callback(std::function &&f); diff --git a/esphome/components/daikin_arc/daikin_arc.cpp b/esphome/components/daikin_arc/daikin_arc.cpp index 068819ecd1..f05342f482 100644 --- a/esphome/components/daikin_arc/daikin_arc.cpp +++ b/esphome/components/daikin_arc/daikin_arc.cpp @@ -241,9 +241,7 @@ uint8_t DaikinArcClimate::humidity_() { climate::ClimateTraits DaikinArcClimate::traits() { climate::ClimateTraits traits = climate_ir::ClimateIR::traits(); - traits.set_supports_current_temperature(true); - traits.set_supports_current_humidity(false); - traits.set_supports_target_humidity(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY); traits.set_visual_min_humidity(38); traits.set_visual_max_humidity(52); return traits; diff --git a/esphome/components/demo/demo_climate.h b/esphome/components/demo/demo_climate.h index 1ba80aabf5..84b16e7ec5 100644 --- a/esphome/components/demo/demo_climate.h +++ b/esphome/components/demo/demo_climate.h @@ -82,16 +82,14 @@ class DemoClimate : public climate::Climate, public Component { climate::ClimateTraits traits{}; switch (type_) { case DemoClimateType::TYPE_1: - traits.set_supports_current_temperature(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_SUPPORTS_ACTION); traits.set_supported_modes({ climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT, }); - traits.set_supports_action(true); traits.set_visual_temperature_step(0.5); break; case DemoClimateType::TYPE_2: - traits.set_supports_current_temperature(false); traits.set_supported_modes({ climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT, @@ -100,7 +98,7 @@ class DemoClimate : public climate::Climate, public Component { climate::CLIMATE_MODE_DRY, climate::CLIMATE_MODE_FAN_ONLY, }); - traits.set_supports_action(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION); traits.set_supported_fan_modes({ climate::CLIMATE_FAN_ON, climate::CLIMATE_FAN_OFF, @@ -123,8 +121,8 @@ class DemoClimate : public climate::Climate, public Component { traits.set_supported_custom_presets({"My Preset"}); break; case DemoClimateType::TYPE_3: - traits.set_supports_current_temperature(true); - traits.set_supports_two_point_target_temperature(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | + climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE); traits.set_supported_modes({ climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_COOL, diff --git a/esphome/components/e131/e131.cpp b/esphome/components/e131/e131.cpp index a74fc9be4a..d18d945cec 100644 --- a/esphome/components/e131/e131.cpp +++ b/esphome/components/e131/e131.cpp @@ -80,8 +80,8 @@ void E131Component::add_effect(E131AddressableLightEffect *light_effect) { return; } - ESP_LOGD(TAG, "Registering '%s' for universes %d-%d.", light_effect->get_name().c_str(), - light_effect->get_first_universe(), light_effect->get_last_universe()); + ESP_LOGD(TAG, "Registering '%s' for universes %d-%d.", light_effect->get_name(), light_effect->get_first_universe(), + light_effect->get_last_universe()); light_effects_.insert(light_effect); @@ -95,8 +95,8 @@ void E131Component::remove_effect(E131AddressableLightEffect *light_effect) { return; } - ESP_LOGD(TAG, "Unregistering '%s' for universes %d-%d.", 
light_effect->get_name().c_str(), - light_effect->get_first_universe(), light_effect->get_last_universe()); + ESP_LOGD(TAG, "Unregistering '%s' for universes %d-%d.", light_effect->get_name(), light_effect->get_first_universe(), + light_effect->get_last_universe()); light_effects_.erase(light_effect); diff --git a/esphome/components/e131/e131_addressable_light_effect.cpp b/esphome/components/e131/e131_addressable_light_effect.cpp index 4d1f98ab6c..780e181f04 100644 --- a/esphome/components/e131/e131_addressable_light_effect.cpp +++ b/esphome/components/e131/e131_addressable_light_effect.cpp @@ -9,7 +9,7 @@ namespace e131 { static const char *const TAG = "e131_addressable_light_effect"; static const int MAX_DATA_SIZE = (sizeof(E131Packet::values) - 1); -E131AddressableLightEffect::E131AddressableLightEffect(const std::string &name) : AddressableLightEffect(name) {} +E131AddressableLightEffect::E131AddressableLightEffect(const char *name) : AddressableLightEffect(name) {} int E131AddressableLightEffect::get_data_per_universe() const { return get_lights_per_universe() * channels_; } @@ -58,8 +58,8 @@ bool E131AddressableLightEffect::process_(int universe, const E131Packet &packet std::min(it->size(), std::min(output_offset + get_lights_per_universe(), output_offset + packet.count - 1)); auto *input_data = packet.values + 1; - ESP_LOGV(TAG, "Applying data for '%s' on %d universe, for %" PRId32 "-%d.", get_name().c_str(), universe, - output_offset, output_end); + ESP_LOGV(TAG, "Applying data for '%s' on %d universe, for %" PRId32 "-%d.", get_name(), universe, output_offset, + output_end); switch (channels_) { case E131_MONO: diff --git a/esphome/components/e131/e131_addressable_light_effect.h b/esphome/components/e131/e131_addressable_light_effect.h index 17d7bd2829..381e08163b 100644 --- a/esphome/components/e131/e131_addressable_light_effect.h +++ b/esphome/components/e131/e131_addressable_light_effect.h @@ -13,7 +13,7 @@ enum E131LightChannels { E131_MONO = 1, E131_RGB = 3, E131_RGBW = 4 }; class E131AddressableLightEffect : public light::AddressableLightEffect { public: - E131AddressableLightEffect(const std::string &name); + E131AddressableLightEffect(const char *name); void start() override; void stop() override; diff --git a/esphome/components/epaper_spi/epaper_spi.cpp b/esphome/components/epaper_spi/epaper_spi.cpp index 21be4a2c05..9630ea7f8b 100644 --- a/esphome/components/epaper_spi/epaper_spi.cpp +++ b/esphome/components/epaper_spi/epaper_spi.cpp @@ -103,7 +103,7 @@ bool EPaperBase::is_idle_() { if (this->busy_pin_ == nullptr) { return true; } - return !this->busy_pin_->digital_read(); + return this->busy_pin_->digital_read(); } void EPaperBase::reset() { diff --git a/esphome/components/esp32/__init__.py b/esphome/components/esp32/__init__.py index b7dd25e0d8..48d11f46fa 100644 --- a/esphome/components/esp32/__init__.py +++ b/esphome/components/esp32/__init__.py @@ -1,3 +1,4 @@ +import contextlib from dataclasses import dataclass import itertools import logging @@ -102,6 +103,10 @@ COMPILER_OPTIMIZATIONS = { "SIZE": "CONFIG_COMPILER_OPTIMIZATION_SIZE", } +# Socket limit configuration for ESP-IDF +# ESP-IDF CONFIG_LWIP_MAX_SOCKETS has range 1-253, default 10 +DEFAULT_MAX_SOCKETS = 10 # ESP-IDF default + ARDUINO_ALLOWED_VARIANTS = [ VARIANT_ESP32, VARIANT_ESP32C3, @@ -545,6 +550,32 @@ CONF_ENABLE_LWIP_BRIDGE_INTERFACE = "enable_lwip_bridge_interface" CONF_ENABLE_LWIP_TCPIP_CORE_LOCKING = "enable_lwip_tcpip_core_locking" CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY = 
"enable_lwip_check_thread_safety" CONF_DISABLE_LIBC_LOCKS_IN_IRAM = "disable_libc_locks_in_iram" +CONF_DISABLE_VFS_SUPPORT_TERMIOS = "disable_vfs_support_termios" +CONF_DISABLE_VFS_SUPPORT_SELECT = "disable_vfs_support_select" +CONF_DISABLE_VFS_SUPPORT_DIR = "disable_vfs_support_dir" + +# VFS requirement tracking +# Components that need VFS features can call require_vfs_select() or require_vfs_dir() +KEY_VFS_SELECT_REQUIRED = "vfs_select_required" +KEY_VFS_DIR_REQUIRED = "vfs_dir_required" + + +def require_vfs_select() -> None: + """Mark that VFS select support is required by a component. + + Call this from components that use esp_vfs_eventfd or other VFS select features. + This prevents CONFIG_VFS_SUPPORT_SELECT from being disabled. + """ + CORE.data[KEY_VFS_SELECT_REQUIRED] = True + + +def require_vfs_dir() -> None: + """Mark that VFS directory support is required by a component. + + Call this from components that use directory functions (opendir, readdir, mkdir, etc.). + This prevents CONFIG_VFS_SUPPORT_DIR from being disabled. + """ + CORE.data[KEY_VFS_DIR_REQUIRED] = True def _validate_idf_component(config: ConfigType) -> ConfigType: @@ -610,6 +641,13 @@ FRAMEWORK_SCHEMA = cv.All( cv.Optional( CONF_DISABLE_LIBC_LOCKS_IN_IRAM, default=True ): cv.boolean, + cv.Optional( + CONF_DISABLE_VFS_SUPPORT_TERMIOS, default=True + ): cv.boolean, + cv.Optional( + CONF_DISABLE_VFS_SUPPORT_SELECT, default=True + ): cv.boolean, + cv.Optional(CONF_DISABLE_VFS_SUPPORT_DIR, default=True): cv.boolean, cv.Optional(CONF_EXECUTE_FROM_PSRAM): cv.boolean, } ), @@ -746,6 +784,72 @@ CONFIG_SCHEMA = cv.All( FINAL_VALIDATE_SCHEMA = cv.Schema(final_validate) +def _configure_lwip_max_sockets(conf: dict) -> None: + """Calculate and set CONFIG_LWIP_MAX_SOCKETS based on component needs. + + Socket component tracks consumer needs via consume_sockets() called during config validation. + This function runs in to_code() after all components have registered their socket needs. + User-provided sdkconfig_options take precedence. + """ + from esphome.components.socket import KEY_SOCKET_CONSUMERS + + # Check if user manually specified CONFIG_LWIP_MAX_SOCKETS + user_max_sockets = conf.get(CONF_SDKCONFIG_OPTIONS, {}).get( + "CONFIG_LWIP_MAX_SOCKETS" + ) + + socket_consumers: dict[str, int] = CORE.data.get(KEY_SOCKET_CONSUMERS, {}) + total_sockets = sum(socket_consumers.values()) + + # Early return if no sockets registered and no user override + if total_sockets == 0 and user_max_sockets is None: + return + + components_list = ", ".join( + f"{name}={count}" for name, count in sorted(socket_consumers.items()) + ) + + # User specified their own value - respect it but warn if insufficient + if user_max_sockets is not None: + _LOGGER.info( + "Using user-provided CONFIG_LWIP_MAX_SOCKETS: %s", + user_max_sockets, + ) + + # Warn if user's value is less than what components need + if total_sockets > 0: + user_sockets_int = 0 + with contextlib.suppress(ValueError, TypeError): + user_sockets_int = int(user_max_sockets) + + if user_sockets_int < total_sockets: + _LOGGER.warning( + "CONFIG_LWIP_MAX_SOCKETS is set to %d but your configuration " + "needs %d sockets (registered: %s). You may experience socket " + "exhaustion errors. 
Consider increasing to at least %d.", + user_sockets_int, + total_sockets, + components_list, + total_sockets, + ) + # User's value already added via sdkconfig_options processing + return + + # Auto-calculate based on component needs + # Use at least the ESP-IDF default (10), or the total needed by components + max_sockets = max(DEFAULT_MAX_SOCKETS, total_sockets) + + log_level = logging.INFO if max_sockets > DEFAULT_MAX_SOCKETS else logging.DEBUG + _LOGGER.log( + log_level, + "Setting CONFIG_LWIP_MAX_SOCKETS to %d (registered: %s)", + max_sockets, + components_list, + ) + + add_idf_sdkconfig_option("CONFIG_LWIP_MAX_SOCKETS", max_sockets) + + async def to_code(config): cg.add_platformio_option("board", config[CONF_BOARD]) cg.add_platformio_option("board_upload.flash_size", config[CONF_FLASH_SIZE]) @@ -773,12 +877,27 @@ async def to_code(config): for clean_var in ("IDF_PATH", "IDF_TOOLS_PATH"): os.environ.pop(clean_var, None) + # Set the location of the IDF component manager cache + os.environ["IDF_COMPONENT_CACHE_PATH"] = str( + CORE.relative_internal_path(".espressif") + ) + add_extra_script( "post", "post_build.py", Path(__file__).parent / "post_build.py.script", ) + # In testing mode, add IRAM fix script to allow linking grouped component tests + # Similar to ESP8266's approach but for ESP-IDF + if CORE.testing_mode: + cg.add_build_flag("-DESPHOME_TESTING_MODE") + add_extra_script( + "pre", + "iram_fix.py", + Path(__file__).parent / "iram_fix.py.script", + ) + if conf[CONF_TYPE] == FRAMEWORK_ESP_IDF: cg.add_platformio_option("framework", "espidf") cg.add_build_flag("-DUSE_ESP_IDF") @@ -805,6 +924,7 @@ async def to_code(config): add_idf_sdkconfig_option("CONFIG_AUTOSTART_ARDUINO", True) add_idf_sdkconfig_option("CONFIG_MBEDTLS_PSK_MODES", True) add_idf_sdkconfig_option("CONFIG_MBEDTLS_CERTIFICATE_BUNDLE", True) + add_idf_sdkconfig_option("CONFIG_ESP_PHY_REDUCE_TX_POWER", True) cg.add_build_flag("-Wno-nonnull-compare") @@ -855,6 +975,9 @@ async def to_code(config): add_idf_sdkconfig_option("CONFIG_LWIP_DNS_SUPPORT_MDNS_QUERIES", False) if not advanced.get(CONF_ENABLE_LWIP_BRIDGE_INTERFACE, False): add_idf_sdkconfig_option("CONFIG_LWIP_BRIDGEIF_MAX_PORTS", 0) + + _configure_lwip_max_sockets(conf) + if advanced.get(CONF_EXECUTE_FROM_PSRAM, False): add_idf_sdkconfig_option("CONFIG_SPIRAM_FETCH_INSTRUCTIONS", True) add_idf_sdkconfig_option("CONFIG_SPIRAM_RODATA", True) @@ -877,6 +1000,43 @@ async def to_code(config): if advanced.get(CONF_DISABLE_LIBC_LOCKS_IN_IRAM, True): add_idf_sdkconfig_option("CONFIG_LIBC_LOCKS_PLACE_IN_IRAM", False) + # Disable VFS support for termios (terminal I/O functions) + # ESPHome doesn't use termios functions on ESP32 (only used in host UART driver). + # Saves approximately 1.8KB of flash when disabled (default). + add_idf_sdkconfig_option( + "CONFIG_VFS_SUPPORT_TERMIOS", + not advanced.get(CONF_DISABLE_VFS_SUPPORT_TERMIOS, True), + ) + + # Disable VFS support for select() with file descriptors + # ESPHome only uses select() with sockets via lwip_select(), which still works. + # VFS select is only needed for UART/eventfd file descriptors. + # Components that need it (e.g., openthread) call require_vfs_select(). + # Saves approximately 2.7KB of flash when disabled (default). 
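For reference, a minimal sketch (not part of this patch) of how a hypothetical component could opt into these hooks; consume_sockets() and require_vfs_select() are the helpers introduced in this change, while MyComponent, my_component, and the socket count are illustrative:

import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import esp32
from esphome.const import CONF_ID

my_component_ns = cg.esphome_ns.namespace("my_component")
MyComponent = my_component_ns.class_("MyComponent", cg.Component)


def _consume_my_sockets(config):
    from esphome.components import socket

    # Reserve one listening socket plus one client connection for this component
    socket.consume_sockets(2, "my_component")(config)
    return config


CONFIG_SCHEMA = cv.All(
    cv.Schema({cv.GenerateID(): cv.declare_id(MyComponent)}).extend(cv.COMPONENT_SCHEMA),
    _consume_my_sockets,
)


async def to_code(config):
    esp32.require_vfs_select()  # keep CONFIG_VFS_SUPPORT_SELECT enabled, e.g. for esp_vfs_eventfd
    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)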
+ if CORE.data.get(KEY_VFS_SELECT_REQUIRED, False): + # Component requires VFS select - force enable regardless of user setting + add_idf_sdkconfig_option("CONFIG_VFS_SUPPORT_SELECT", True) + else: + # No component needs it - allow user to control (default: disabled) + add_idf_sdkconfig_option( + "CONFIG_VFS_SUPPORT_SELECT", + not advanced.get(CONF_DISABLE_VFS_SUPPORT_SELECT, True), + ) + + # Disable VFS support for directory functions (opendir, readdir, mkdir, etc.) + # ESPHome doesn't use directory functions on ESP32. + # Components that need it (e.g., storage components) call require_vfs_dir(). + # Saves approximately 0.5KB+ of flash when disabled (default). + if CORE.data.get(KEY_VFS_DIR_REQUIRED, False): + # Component requires VFS directory support - force enable regardless of user setting + add_idf_sdkconfig_option("CONFIG_VFS_SUPPORT_DIR", True) + else: + # No component needs it - allow user to control (default: disabled) + add_idf_sdkconfig_option( + "CONFIG_VFS_SUPPORT_DIR", + not advanced.get(CONF_DISABLE_VFS_SUPPORT_DIR, True), + ) + cg.add_platformio_option("board_build.partitions", "partitions.csv") if CONF_PARTITIONS in config: add_extra_build_file( diff --git a/esphome/components/esp32/iram_fix.py.script b/esphome/components/esp32/iram_fix.py.script new file mode 100644 index 0000000000..0d23f9a81b --- /dev/null +++ b/esphome/components/esp32/iram_fix.py.script @@ -0,0 +1,71 @@ +import os +import re + +# pylint: disable=E0602 +Import("env") # noqa + +# IRAM size for testing mode (2MB - large enough to accommodate grouped tests) +TESTING_IRAM_SIZE = 0x200000 + + +def patch_idf_linker_script(source, target, env): + """Patch ESP-IDF linker script to increase IRAM size for testing mode.""" + # Check if we're in testing mode by looking for the define + build_flags = env.get("BUILD_FLAGS", []) + testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) + + if not testing_mode: + return + + # For ESP-IDF, the linker scripts are generated in the build directory + build_dir = env.subst("$BUILD_DIR") + + # The memory.ld file is directly in the build directory + memory_ld = os.path.join(build_dir, "memory.ld") + + if not os.path.exists(memory_ld): + print(f"ESPHome: Warning - could not find linker script at {memory_ld}") + return + + try: + with open(memory_ld, "r") as f: + content = f.read() + except OSError as e: + print(f"ESPHome: Error reading linker script: {e}") + return + + # Check if this file contains iram0_0_seg + if 'iram0_0_seg' not in content: + print(f"ESPHome: Warning - iram0_0_seg not found in {memory_ld}") + return + + # Look for iram0_0_seg definition and increase its length + # ESP-IDF format can be: + # iram0_0_seg (RX) : org = 0x40080000, len = 0x20000 + 0x0 + # or more complex with nested parentheses: + # iram0_0_seg (RX) : org = (0x40370000 + 0x4000), len = (((0x403CB700 - (0x40378000 - 0x3FC88000)) - 0x3FC88000) + 0x8000 - 0x4000) + # We want to change len to TESTING_IRAM_SIZE for testing + + # Use a more robust approach: find the line and manually parse it + lines = content.split('\n') + for i, line in enumerate(lines): + if 'iram0_0_seg' in line and 'len' in line: + # Find the position of "len = " and replace everything after it until the end of the statement + match = re.search(r'(iram0_0_seg\s*\([^)]*\)\s*:\s*org\s*=\s*(?:\([^)]+\)|0x[0-9a-fA-F]+)\s*,\s*len\s*=\s*)(.+?)(\s*)$', line) + if match: + lines[i] = f"{match.group(1)}{TESTING_IRAM_SIZE:#x}{match.group(3)}" + break + + updated = '\n'.join(lines) + + if updated != content: + with 
open(memory_ld, "w") as f: + f.write(updated) + print(f"ESPHome: Patched IRAM size to {TESTING_IRAM_SIZE:#x} in {memory_ld} for testing mode") + else: + print(f"ESPHome: Warning - could not patch iram0_0_seg in {memory_ld}") + + +# Hook into the build process before linking +# For ESP-IDF, we need to run this after the linker scripts are generated +env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_idf_linker_script) diff --git a/esphome/components/esp32_ble_client/ble_client_base.h b/esphome/components/esp32_ble_client/ble_client_base.h index f2edd6c2b3..7f0ae3b83e 100644 --- a/esphome/components/esp32_ble_client/ble_client_base.h +++ b/esphome/components/esp32_ble_client/ble_client_base.h @@ -61,12 +61,7 @@ class BLEClientBase : public espbt::ESPBTClient, public Component { this->address_str_ = ""; } else { char buf[18]; - uint8_t mac[6] = { - (uint8_t) ((this->address_ >> 40) & 0xff), (uint8_t) ((this->address_ >> 32) & 0xff), - (uint8_t) ((this->address_ >> 24) & 0xff), (uint8_t) ((this->address_ >> 16) & 0xff), - (uint8_t) ((this->address_ >> 8) & 0xff), (uint8_t) ((this->address_ >> 0) & 0xff), - }; - format_mac_addr_upper(mac, buf); + format_mac_addr_upper(this->remote_bda_, buf); this->address_str_ = buf; } } diff --git a/esphome/components/esp32_camera_web_server/__init__.py b/esphome/components/esp32_camera_web_server/__init__.py index a6a7ac3630..ed1aaa2e07 100644 --- a/esphome/components/esp32_camera_web_server/__init__.py +++ b/esphome/components/esp32_camera_web_server/__init__.py @@ -1,6 +1,7 @@ import esphome.codegen as cg import esphome.config_validation as cv from esphome.const import CONF_ID, CONF_MODE, CONF_PORT +from esphome.types import ConfigType CODEOWNERS = ["@ayufan"] AUTO_LOAD = ["camera"] @@ -13,13 +14,27 @@ Mode = esp32_camera_web_server_ns.enum("Mode") MODES = {"STREAM": Mode.STREAM, "SNAPSHOT": Mode.SNAPSHOT} -CONFIG_SCHEMA = cv.Schema( - { - cv.GenerateID(): cv.declare_id(CameraWebServer), - cv.Required(CONF_PORT): cv.port, - cv.Required(CONF_MODE): cv.enum(MODES, upper=True), - }, -).extend(cv.COMPONENT_SCHEMA) + +def _consume_camera_web_server_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for camera web server.""" + from esphome.components import socket + + # Each camera web server instance needs 1 listening socket + 2 client connections + sockets_needed = 3 + socket.consume_sockets(sockets_needed, "esp32_camera_web_server")(config) + return config + + +CONFIG_SCHEMA = cv.All( + cv.Schema( + { + cv.GenerateID(): cv.declare_id(CameraWebServer), + cv.Required(CONF_PORT): cv.port, + cv.Required(CONF_MODE): cv.enum(MODES, upper=True), + }, + ).extend(cv.COMPONENT_SCHEMA), + _consume_camera_web_server_sockets, +) async def to_code(config): diff --git a/esphome/components/esp32_hosted/__init__.py b/esphome/components/esp32_hosted/__init__.py index 7e9f1b05b5..fde75517eb 100644 --- a/esphome/components/esp32_hosted/__init__.py +++ b/esphome/components/esp32_hosted/__init__.py @@ -95,7 +95,7 @@ async def to_code(config): if framework_ver >= cv.Version(5, 5, 0): esp32.add_idf_component(name="espressif/esp_wifi_remote", ref="1.1.5") esp32.add_idf_component(name="espressif/eppp_link", ref="1.1.3") - esp32.add_idf_component(name="espressif/esp_hosted", ref="2.5.11") + esp32.add_idf_component(name="espressif/esp_hosted", ref="2.6.1") else: esp32.add_idf_component(name="espressif/esp_wifi_remote", ref="0.13.0") esp32.add_idf_component(name="espressif/eppp_link", ref="0.2.0") diff --git a/esphome/components/esp32_improv/__init__.py 
b/esphome/components/esp32_improv/__init__.py index fa33bd947a..1a7194da81 100644 --- a/esphome/components/esp32_improv/__init__.py +++ b/esphome/components/esp32_improv/__init__.py @@ -1,11 +1,11 @@ from esphome import automation import esphome.codegen as cg -from esphome.components import binary_sensor, esp32_ble, output +from esphome.components import binary_sensor, esp32_ble, improv_base, output from esphome.components.esp32_ble import BTLoggers import esphome.config_validation as cv from esphome.const import CONF_ID, CONF_ON_STATE, CONF_TRIGGER_ID -AUTO_LOAD = ["esp32_ble_server"] +AUTO_LOAD = ["esp32_ble_server", "improv_base"] CODEOWNERS = ["@jesserockz"] DEPENDENCIES = ["wifi", "esp32"] @@ -20,6 +20,7 @@ CONF_ON_STOP = "on_stop" CONF_STATUS_INDICATOR = "status_indicator" CONF_WIFI_TIMEOUT = "wifi_timeout" + improv_ns = cg.esphome_ns.namespace("improv") Error = improv_ns.enum("Error") State = improv_ns.enum("State") @@ -43,55 +44,63 @@ ESP32ImprovStoppedTrigger = esp32_improv_ns.class_( ) -CONFIG_SCHEMA = cv.Schema( - { - cv.GenerateID(): cv.declare_id(ESP32ImprovComponent), - cv.Required(CONF_AUTHORIZER): cv.Any( - cv.none, cv.use_id(binary_sensor.BinarySensor) - ), - cv.Optional(CONF_STATUS_INDICATOR): cv.use_id(output.BinaryOutput), - cv.Optional( - CONF_IDENTIFY_DURATION, default="10s" - ): cv.positive_time_period_milliseconds, - cv.Optional( - CONF_AUTHORIZED_DURATION, default="1min" - ): cv.positive_time_period_milliseconds, - cv.Optional( - CONF_WIFI_TIMEOUT, default="1min" - ): cv.positive_time_period_milliseconds, - cv.Optional(CONF_ON_PROVISIONED): automation.validate_automation( - { - cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( - ESP32ImprovProvisionedTrigger - ), - } - ), - cv.Optional(CONF_ON_PROVISIONING): automation.validate_automation( - { - cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( - ESP32ImprovProvisioningTrigger - ), - } - ), - cv.Optional(CONF_ON_START): automation.validate_automation( - { - cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(ESP32ImprovStartTrigger), - } - ), - cv.Optional(CONF_ON_STATE): automation.validate_automation( - { - cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(ESP32ImprovStateTrigger), - } - ), - cv.Optional(CONF_ON_STOP): automation.validate_automation( - { - cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( - ESP32ImprovStoppedTrigger - ), - } - ), - } -).extend(cv.COMPONENT_SCHEMA) +CONFIG_SCHEMA = ( + cv.Schema( + { + cv.GenerateID(): cv.declare_id(ESP32ImprovComponent), + cv.Required(CONF_AUTHORIZER): cv.Any( + cv.none, cv.use_id(binary_sensor.BinarySensor) + ), + cv.Optional(CONF_STATUS_INDICATOR): cv.use_id(output.BinaryOutput), + cv.Optional( + CONF_IDENTIFY_DURATION, default="10s" + ): cv.positive_time_period_milliseconds, + cv.Optional( + CONF_AUTHORIZED_DURATION, default="1min" + ): cv.positive_time_period_milliseconds, + cv.Optional( + CONF_WIFI_TIMEOUT, default="1min" + ): cv.positive_time_period_milliseconds, + cv.Optional(CONF_ON_PROVISIONED): automation.validate_automation( + { + cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( + ESP32ImprovProvisionedTrigger + ), + } + ), + cv.Optional(CONF_ON_PROVISIONING): automation.validate_automation( + { + cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( + ESP32ImprovProvisioningTrigger + ), + } + ), + cv.Optional(CONF_ON_START): automation.validate_automation( + { + cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( + ESP32ImprovStartTrigger + ), + } + ), + cv.Optional(CONF_ON_STATE): automation.validate_automation( + { + cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( + 
ESP32ImprovStateTrigger + ), + } + ), + cv.Optional(CONF_ON_STOP): automation.validate_automation( + { + cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id( + ESP32ImprovStoppedTrigger + ), + } + ), + } + ) + .extend(improv_base.IMPROV_SCHEMA) + .extend(cv.COMPONENT_SCHEMA) +) async def to_code(config): @@ -102,7 +111,8 @@ async def to_code(config): await cg.register_component(var, config) cg.add_define("USE_IMPROV") - cg.add_library("improv/Improv", "1.2.4") + + await improv_base.setup_improv_core(var, config, "esp32_improv") cg.add(var.set_identify_duration(config[CONF_IDENTIFY_DURATION])) cg.add(var.set_authorized_duration(config[CONF_AUTHORIZED_DURATION])) diff --git a/esphome/components/esp32_improv/esp32_improv_component.cpp b/esphome/components/esp32_improv/esp32_improv_component.cpp index d83caf931b..56436b9d3d 100644 --- a/esphome/components/esp32_improv/esp32_improv_component.cpp +++ b/esphome/components/esp32_improv/esp32_improv_component.cpp @@ -1,10 +1,10 @@ #include "esp32_improv_component.h" +#include "esphome/components/bytebuffer/bytebuffer.h" #include "esphome/components/esp32_ble/ble.h" #include "esphome/components/esp32_ble_server/ble_2902.h" #include "esphome/core/application.h" #include "esphome/core/log.h" -#include "esphome/components/bytebuffer/bytebuffer.h" #ifdef USE_ESP32 @@ -384,17 +384,34 @@ void ESP32ImprovComponent::check_wifi_connection_() { this->connecting_sta_ = {}; this->cancel_timeout("wifi-connect-timeout"); - std::vector urls = {ESPHOME_MY_LINK}; + // Build URL list with minimal allocations + // Maximum 3 URLs: custom next_url + ESPHOME_MY_LINK + webserver URL + std::string url_strings[3]; + size_t url_count = 0; + +#ifdef USE_ESP32_IMPROV_NEXT_URL + // Add next_url if configured (should be first per Improv BLE spec) + std::string next_url = this->get_formatted_next_url_(); + if (!next_url.empty()) { + url_strings[url_count++] = std::move(next_url); + } +#endif + + // Add default URLs for backward compatibility + url_strings[url_count++] = ESPHOME_MY_LINK; #ifdef USE_WEBSERVER for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) { if (ip.is_ip4()) { - std::string webserver_url = "http://" + ip.str() + ":" + to_string(USE_WEBSERVER_PORT); - urls.push_back(webserver_url); + char url_buffer[64]; + snprintf(url_buffer, sizeof(url_buffer), "http://%s:%d", ip.str().c_str(), USE_WEBSERVER_PORT); + url_strings[url_count++] = url_buffer; break; } } #endif - std::vector data = improv::build_rpc_response(improv::WIFI_SETTINGS, urls); + // Pass to build_rpc_response using vector constructor from iterators to avoid extra copies + std::vector data = improv::build_rpc_response( + improv::WIFI_SETTINGS, std::vector(url_strings, url_strings + url_count)); this->send_response_(data); } else if (this->is_active() && this->state_ != improv::STATE_PROVISIONED) { ESP_LOGD(TAG, "WiFi provisioned externally"); diff --git a/esphome/components/esp32_improv/esp32_improv_component.h b/esphome/components/esp32_improv/esp32_improv_component.h index 6782430ffe..fd3b2b861d 100644 --- a/esphome/components/esp32_improv/esp32_improv_component.h +++ b/esphome/components/esp32_improv/esp32_improv_component.h @@ -7,6 +7,7 @@ #include "esphome/components/esp32_ble_server/ble_characteristic.h" #include "esphome/components/esp32_ble_server/ble_server.h" +#include "esphome/components/improv_base/improv_base.h" #include "esphome/components/wifi/wifi_component.h" #ifdef USE_ESP32_IMPROV_STATE_CALLBACK @@ -32,7 +33,7 @@ namespace esp32_improv { using namespace esp32_ble_server; 
-class ESP32ImprovComponent : public Component { +class ESP32ImprovComponent : public Component, public improv_base::ImprovBase { public: ESP32ImprovComponent(); void dump_config() override; diff --git a/esphome/components/esp8266/__init__.py b/esphome/components/esp8266/__init__.py index 9d8e6b7d1e..a74f9ee8ce 100644 --- a/esphome/components/esp8266/__init__.py +++ b/esphome/components/esp8266/__init__.py @@ -190,7 +190,9 @@ async def to_code(config): cg.add_define("ESPHOME_VARIANT", "ESP8266") cg.add_define(ThreadModel.SINGLE) - cg.add_platformio_option("extra_scripts", ["pre:iram_fix.py", "post:post_build.py"]) + cg.add_platformio_option( + "extra_scripts", ["pre:testing_mode.py", "post:post_build.py"] + ) conf = config[CONF_FRAMEWORK] cg.add_platformio_option("framework", "arduino") @@ -230,9 +232,9 @@ async def to_code(config): # For cases where nullptrs can be handled, use nothrow: `new (std::nothrow) T;` cg.add_build_flag("-DNEW_OOM_ABORT") - # In testing mode, fake a larger IRAM to allow linking grouped component tests - # Real ESP8266 hardware only has 32KB IRAM, but for CI testing we pretend it has 2MB - # This is done via a pre-build script that generates a custom linker script + # In testing mode, fake larger memory to allow linking grouped component tests + # Real ESP8266 hardware only has 32KB IRAM and ~80KB RAM, but for CI testing + # we pretend it has much larger memory to test that components compile together if CORE.testing_mode: cg.add_build_flag("-DESPHOME_TESTING_MODE") @@ -271,8 +273,8 @@ def copy_files(): post_build_file, CORE.relative_build_path("post_build.py"), ) - iram_fix_file = dir / "iram_fix.py.script" + testing_mode_file = dir / "testing_mode.py.script" copy_file_if_changed( - iram_fix_file, - CORE.relative_build_path("iram_fix.py"), + testing_mode_file, + CORE.relative_build_path("testing_mode.py"), ) diff --git a/esphome/components/esp8266/iram_fix.py.script b/esphome/components/esp8266/iram_fix.py.script deleted file mode 100644 index 96bddc2ced..0000000000 --- a/esphome/components/esp8266/iram_fix.py.script +++ /dev/null @@ -1,44 +0,0 @@ -import os -import re - -# pylint: disable=E0602 -Import("env") # noqa - - -def patch_linker_script_after_preprocess(source, target, env): - """Patch the local linker script after PlatformIO preprocesses it.""" - # Check if we're in testing mode by looking for the define - build_flags = env.get("BUILD_FLAGS", []) - testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) - - if not testing_mode: - return - - # Get the local linker script path - build_dir = env.subst("$BUILD_DIR") - local_ld = os.path.join(build_dir, "ld", "local.eagle.app.v6.common.ld") - - if not os.path.exists(local_ld): - return - - # Read the linker script - with open(local_ld, "r") as f: - content = f.read() - - # Replace IRAM size from 0x8000 (32KB) to 0x200000 (2MB) - # The line looks like: iram1_0_seg : org = 0x40100000, len = 0x8000 - updated = re.sub( - r"(iram1_0_seg\s*:\s*org\s*=\s*0x40100000\s*,\s*len\s*=\s*)0x8000", - r"\g<1>0x200000", - content, - ) - - if updated != content: - with open(local_ld, "w") as f: - f.write(updated) - print("ESPHome: Patched IRAM size to 2MB for testing mode") - - -# Hook into the build process right before linking -# This runs after PlatformIO has already preprocessed the linker scripts -env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_linker_script_after_preprocess) diff --git a/esphome/components/esp8266/testing_mode.py.script b/esphome/components/esp8266/testing_mode.py.script new 
file mode 100644 index 0000000000..44d84b765c --- /dev/null +++ b/esphome/components/esp8266/testing_mode.py.script @@ -0,0 +1,166 @@ +import os +import re + +# pylint: disable=E0602 +Import("env") # noqa + + +# Memory sizes for testing mode (allow larger builds for CI component grouping) +TESTING_IRAM_SIZE = "0x200000" # 2MB +TESTING_DRAM_SIZE = "0x200000" # 2MB +TESTING_FLASH_SIZE = "0x2000000" # 32MB + + +def patch_segment_size(content, segment_name, new_size, label): + """Patch a memory segment's length in linker script. + + Args: + content: Linker script content + segment_name: Name of the segment (e.g., 'iram1_0_seg') + new_size: New size as hex string (e.g., '0x200000') + label: Human-readable label for logging (e.g., 'IRAM') + + Returns: + Tuple of (patched_content, was_patched) + """ + # Match: segment_name : org = 0x..., len = 0x... + pattern = rf"({segment_name}\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+" + new_content = re.sub(pattern, rf"\g<1>{new_size}", content) + return new_content, new_content != content + + +def apply_memory_patches(content): + """Apply IRAM, DRAM, and Flash patches to linker script content. + + Args: + content: Linker script content as string + + Returns: + Patched content as string + """ + patches_applied = [] + + # Patch IRAM (for larger code in IRAM) + content, patched = patch_segment_size(content, "iram1_0_seg", TESTING_IRAM_SIZE, "IRAM") + if patched: + patches_applied.append("IRAM") + + # Patch DRAM (for larger BSS/data sections) + content, patched = patch_segment_size(content, "dram0_0_seg", TESTING_DRAM_SIZE, "DRAM") + if patched: + patches_applied.append("DRAM") + + # Patch Flash (for larger code sections) + content, patched = patch_segment_size(content, "irom0_0_seg", TESTING_FLASH_SIZE, "Flash") + if patched: + patches_applied.append("Flash") + + if patches_applied: + iram_mb = int(TESTING_IRAM_SIZE, 16) // (1024 * 1024) + dram_mb = int(TESTING_DRAM_SIZE, 16) // (1024 * 1024) + flash_mb = int(TESTING_FLASH_SIZE, 16) // (1024 * 1024) + print(f" Patched memory segments: {', '.join(patches_applied)} (IRAM/DRAM: {iram_mb}MB, Flash: {flash_mb}MB)") + + return content + + +def patch_linker_script_file(filepath, description): + """Patch a linker script file in the build directory with enlarged memory segments. + + This function modifies linker scripts in the build directory only (never SDK files). + It patches IRAM, DRAM, and Flash segments to allow larger builds in testing mode. + + Args: + filepath: Path to the linker script file in the build directory + description: Human-readable description for logging + + Returns: + True if the file was patched, False if already patched or not found + """ + if not os.path.exists(filepath): + print(f"ESPHome: {description} not found at {filepath}") + return False + + print(f"ESPHome: Patching {description}...") + with open(filepath, "r") as f: + content = f.read() + + patched_content = apply_memory_patches(content) + + if patched_content != content: + with open(filepath, "w") as f: + f.write(patched_content) + print(f"ESPHome: Successfully patched {description}") + return True + else: + print(f"ESPHome: {description} already patched or no changes needed") + return False + + +def patch_local_linker_script(source, target, env): + """Patch the local.eagle.app.v6.common.ld in build directory. + + This patches the preprocessed linker script that PlatformIO creates in the build + directory, enlarging IRAM, DRAM, and Flash segments for testing mode. 
+ + Args: + source: SCons source nodes + target: SCons target nodes + env: SCons environment + """ + # Check if we're in testing mode + build_flags = env.get("BUILD_FLAGS", []) + testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) + + if not testing_mode: + return + + # Patch the local linker script if it exists + build_dir = env.subst("$BUILD_DIR") + ld_dir = os.path.join(build_dir, "ld") + if os.path.exists(ld_dir): + local_ld = os.path.join(ld_dir, "local.eagle.app.v6.common.ld") + if os.path.exists(local_ld): + patch_linker_script_file(local_ld, "local.eagle.app.v6.common.ld") + + +# Check if we're in testing mode +build_flags = env.get("BUILD_FLAGS", []) +testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags) + +if testing_mode: + # Create a custom linker script in the build directory with patched memory limits + # This allows larger IRAM/DRAM/Flash for CI component grouping tests + build_dir = env.subst("$BUILD_DIR") + ldscript = env.GetProjectOption("board_build.ldscript", "") + assert ldscript, "No linker script configured in board_build.ldscript" + + framework_dir = env.PioPlatform().get_package_dir("framework-arduinoespressif8266") + assert framework_dir is not None, "Could not find framework-arduinoespressif8266 package" + + # Read the original SDK linker script (read-only, SDK is never modified) + sdk_ld = os.path.join(framework_dir, "tools", "sdk", "ld", ldscript) + # Create a custom version in the build directory (isolated, temporary) + custom_ld = os.path.join(build_dir, f"testing_{ldscript}") + + if os.path.exists(sdk_ld) and not os.path.exists(custom_ld): + # Read the SDK linker script + with open(sdk_ld, "r") as f: + content = f.read() + + # Apply memory patches (IRAM: 2MB, DRAM: 2MB, Flash: 32MB) + patched_content = apply_memory_patches(content) + + # Write the patched linker script to the build directory + with open(custom_ld, "w") as f: + f.write(patched_content) + + print(f"ESPHome: Created custom linker script: {custom_ld}") + + # Tell the linker to use our custom script from the build directory + assert os.path.exists(custom_ld), f"Custom linker script not found: {custom_ld}" + env.Replace(LDSCRIPT_PATH=custom_ld) + print(f"ESPHome: Using custom linker script with patched memory limits") + + # Also patch local.eagle.app.v6.common.ld after PlatformIO creates it + env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_local_linker_script) diff --git a/esphome/components/esphome/ota/__init__.py b/esphome/components/esphome/ota/__init__.py index 69a50a2de9..e56e85b231 100644 --- a/esphome/components/esphome/ota/__init__.py +++ b/esphome/components/esphome/ota/__init__.py @@ -103,7 +103,16 @@ def ota_esphome_final_validate(config): ) -CONFIG_SCHEMA = ( +def _consume_ota_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for OTA component.""" + from esphome.components import socket + + # OTA needs 1 listening socket (client connections are temporary during updates) + socket.consume_sockets(1, "ota")(config) + return config + + +CONFIG_SCHEMA = cv.All( cv.Schema( { cv.GenerateID(): cv.declare_id(ESPHomeOTAComponent), @@ -130,7 +139,8 @@ CONFIG_SCHEMA = ( } ) .extend(BASE_OTA_SCHEMA) - .extend(cv.COMPONENT_SCHEMA) + .extend(cv.COMPONENT_SCHEMA), + _consume_ota_sockets, ) FINAL_VALIDATE_SCHEMA = ota_esphome_final_validate diff --git a/esphome/components/espnow/automation.h b/esphome/components/espnow/automation.h index 2416377859..5415b088fd 100644 --- a/esphome/components/espnow/automation.h +++ 
b/esphome/components/espnow/automation.h @@ -14,13 +14,13 @@ template<typename... Ts> class SendAction : public Action<Ts...>, public Parente TEMPLATABLE_VALUE(std::vector<uint8_t>, data); public: - void add_on_sent(const std::vector<Action<Ts...> *> &actions) { + void add_on_sent(const std::initializer_list<Action<Ts...> *> &actions) { this->sent_.add_actions(actions); if (this->flags_.wait_for_sent) { this->sent_.add_action(new LambdaAction<Ts...>([this](Ts... x) { this->play_next_(x...); })); } } - void add_on_error(const std::vector<Action<Ts...> *> &actions) { + void add_on_error(const std::initializer_list<Action<Ts...> *> &actions) { this->error_.add_actions(actions); if (this->flags_.wait_for_sent) { this->error_.add_action(new LambdaAction<Ts...>([this](Ts... x) { diff --git a/esphome/components/espnow/packet_transport/__init__.py b/esphome/components/espnow/packet_transport/__init__.py new file mode 100644 index 0000000000..e6d66440db --- /dev/null +++ b/esphome/components/espnow/packet_transport/__init__.py @@ -0,0 +1,39 @@ +"""ESP-NOW transport platform for packet_transport component.""" + +import esphome.codegen as cg +from esphome.components.packet_transport import ( + PacketTransport, + new_packet_transport, + transport_schema, +) +import esphome.config_validation as cv +from esphome.core import HexInt +from esphome.cpp_types import PollingComponent + +from .. import ESPNowComponent, espnow_ns + +CODEOWNERS = ["@EasilyBoredEngineer"] +DEPENDENCIES = ["espnow"] + +ESPNowTransport = espnow_ns.class_("ESPNowTransport", PacketTransport, PollingComponent) + +CONF_ESPNOW_ID = "espnow_id" +CONF_PEER_ADDRESS = "peer_address" + +CONFIG_SCHEMA = transport_schema(ESPNowTransport).extend( + { + cv.GenerateID(CONF_ESPNOW_ID): cv.use_id(ESPNowComponent), + cv.Optional(CONF_PEER_ADDRESS, default="FF:FF:FF:FF:FF:FF"): cv.mac_address, + } +) + + +async def to_code(config): + """Set up the ESP-NOW transport component.""" + var, _ = await new_packet_transport(config) + + await cg.register_parented(var, config[CONF_ESPNOW_ID]) + + # Set peer address - convert MAC to parts array like ESP-NOW does + mac = config[CONF_PEER_ADDRESS] + cg.add(var.set_peer_address([HexInt(x) for x in mac.parts])) diff --git a/esphome/components/espnow/packet_transport/espnow_transport.cpp b/esphome/components/espnow/packet_transport/espnow_transport.cpp new file mode 100644 index 0000000000..d30e9447a0 --- /dev/null +++ b/esphome/components/espnow/packet_transport/espnow_transport.cpp @@ -0,0 +1,97 @@ +#include "espnow_transport.h" + +#ifdef USE_ESP32 + +#include "esphome/core/application.h" +#include "esphome/core/log.h" + +namespace esphome { +namespace espnow { + +static const char *const TAG = "espnow.transport"; + +bool ESPNowTransport::should_send() { return this->parent_ != nullptr && !this->parent_->is_failed(); } + +void ESPNowTransport::setup() { + packet_transport::PacketTransport::setup(); + + if (this->parent_ == nullptr) { + ESP_LOGE(TAG, "ESPNow component not set"); + this->mark_failed(); + return; + } + + ESP_LOGI(TAG, "Registering ESP-NOW handlers"); + ESP_LOGI(TAG, "Peer address: %02X:%02X:%02X:%02X:%02X:%02X", this->peer_address_[0], this->peer_address_[1], + this->peer_address_[2], this->peer_address_[3], this->peer_address_[4], this->peer_address_[5]); + + // Register received handler + this->parent_->register_received_handler(static_cast<ESPNowReceivedPacketHandler *>(this)); + + // Register broadcasted handler + this->parent_->register_broadcasted_handler(static_cast<ESPNowBroadcastedHandler *>(this)); +} + +void ESPNowTransport::update() { + packet_transport::PacketTransport::update(); + this->updated_ = true; +} + +void 
ESPNowTransport::send_packet(const std::vector<uint8_t> &buf) const { + if (this->parent_ == nullptr) { + ESP_LOGE(TAG, "ESPNow component not set"); + return; + } + + if (buf.empty()) { + ESP_LOGW(TAG, "Attempted to send empty packet"); + return; + } + + if (buf.size() > ESP_NOW_MAX_DATA_LEN) { + ESP_LOGE(TAG, "Packet too large: %zu bytes (max %d)", buf.size(), ESP_NOW_MAX_DATA_LEN); + return; + } + + // Send to configured peer address + this->parent_->send(this->peer_address_.data(), buf.data(), buf.size(), [](esp_err_t err) { + if (err != ESP_OK) { + ESP_LOGW(TAG, "Send failed: %d", err); + } + }); +} + +bool ESPNowTransport::on_received(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) { + ESP_LOGV(TAG, "Received packet of size %u from %02X:%02X:%02X:%02X:%02X:%02X", size, info.src_addr[0], + info.src_addr[1], info.src_addr[2], info.src_addr[3], info.src_addr[4], info.src_addr[5]); + + if (data == nullptr || size == 0) { + ESP_LOGW(TAG, "Received empty or null packet"); + return false; + } + + this->packet_buffer_.resize(size); + memcpy(this->packet_buffer_.data(), data, size); + this->process_(this->packet_buffer_); + return false; // Allow other handlers to run +} + +bool ESPNowTransport::on_broadcasted(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) { + ESP_LOGV(TAG, "Received broadcast packet of size %u from %02X:%02X:%02X:%02X:%02X:%02X", size, info.src_addr[0], + info.src_addr[1], info.src_addr[2], info.src_addr[3], info.src_addr[4], info.src_addr[5]); + + if (data == nullptr || size == 0) { + ESP_LOGW(TAG, "Received empty or null broadcast packet"); + return false; + } + + this->packet_buffer_.resize(size); + memcpy(this->packet_buffer_.data(), data, size); + this->process_(this->packet_buffer_); + return false; // Allow other handlers to run +} + +} // namespace espnow +} // namespace esphome + +#endif // USE_ESP32 diff --git a/esphome/components/espnow/packet_transport/espnow_transport.h b/esphome/components/espnow/packet_transport/espnow_transport.h new file mode 100644 index 0000000000..3629fad2cd --- /dev/null +++ b/esphome/components/espnow/packet_transport/espnow_transport.h @@ -0,0 +1,44 @@ +#pragma once + +#include "../espnow_component.h" + +#ifdef USE_ESP32 + +#include "esphome/core/component.h" +#include "esphome/components/packet_transport/packet_transport.h" + +#include + +namespace esphome { +namespace espnow { + +class ESPNowTransport : public packet_transport::PacketTransport, + public Parented<ESPNowComponent>, + public ESPNowReceivedPacketHandler, + public ESPNowBroadcastedHandler { + public: + void setup() override; + void update() override; + float get_setup_priority() const override { return setup_priority::AFTER_WIFI; } + + void set_peer_address(peer_address_t address) { + memcpy(this->peer_address_.data(), address.data(), ESP_NOW_ETH_ALEN); + } + + // ESPNow handler interface + bool on_received(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) override; + bool on_broadcasted(const ESPNowRecvInfo &info, const uint8_t *data, uint8_t size) override; + + protected: + void send_packet(const std::vector<uint8_t> &buf) const override; + size_t get_max_packet_size() override { return ESP_NOW_MAX_DATA_LEN; } + bool should_send() override; + + peer_address_t peer_address_{{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}}; + std::vector<uint8_t> packet_buffer_; +}; + +} // namespace espnow +} // namespace esphome + +#endif // USE_ESP32 diff --git a/esphome/components/ethernet/__init__.py b/esphome/components/ethernet/__init__.py index 7384bb26d3..77f70a3630 100644 --- 
a/esphome/components/ethernet/__init__.py +++ b/esphome/components/ethernet/__init__.py @@ -32,6 +32,7 @@ from esphome.const import ( CONF_MISO_PIN, CONF_MODE, CONF_MOSI_PIN, + CONF_NUMBER, CONF_PAGE_ID, CONF_PIN, CONF_POLLING_INTERVAL, @@ -52,12 +53,36 @@ from esphome.core import ( coroutine_with_priority, ) import esphome.final_validate as fv +from esphome.types import ConfigType CONFLICTS_WITH = ["wifi"] DEPENDENCIES = ["esp32"] AUTO_LOAD = ["network"] LOGGER = logging.getLogger(__name__) +# RMII pins that are hardcoded on ESP32 classic and cannot be changed +# These pins are used by the internal Ethernet MAC when using RMII PHYs +ESP32_RMII_FIXED_PINS = { + 19: "EMAC_TXD0", + 21: "EMAC_TX_EN", + 22: "EMAC_TXD1", + 25: "EMAC_RXD0", + 26: "EMAC_RXD1", + 27: "EMAC_RX_CRS_DV", +} + +# RMII default pins for ESP32-P4 +# These are the default pins used by ESP-IDF and are configurable in principle, +# but ESPHome's ethernet component currently has no way to change them +ESP32P4_RMII_DEFAULT_PINS = { + 34: "EMAC_TXD0", + 35: "EMAC_TXD1", + 28: "EMAC_RX_CRS_DV", + 29: "EMAC_RXD0", + 30: "EMAC_RXD1", + 49: "EMAC_TX_EN", +} + ethernet_ns = cg.esphome_ns.namespace("ethernet") PHYRegister = ethernet_ns.struct("PHYRegister") CONF_PHY_ADDR = "phy_addr" @@ -273,7 +298,7 @@ CONFIG_SCHEMA = cv.All( ) -def _final_validate(config): +def _final_validate_spi(config): if config[CONF_TYPE] not in SPI_ETHERNET_TYPES: return if spi_configs := fv.full_config.get().get(CONF_SPI): @@ -292,9 +317,6 @@ def _final_validate(config): ) -FINAL_VALIDATE_SCHEMA = _final_validate - - def manual_ip(config): return cg.StructInitializer( ManualIP, @@ -383,3 +405,57 @@ async def to_code(config): if CORE.using_arduino: cg.add_library("WiFi", None) + + +def _final_validate_rmii_pins(config: ConfigType) -> None: + """Validate that RMII pins are not used by other components.""" + # Only validate for RMII-based PHYs on ESP32/ESP32P4 + if config[CONF_TYPE] in SPI_ETHERNET_TYPES or config[CONF_TYPE] == "OPENETH": + return # SPI and OPENETH don't use RMII + + variant = get_esp32_variant() + if variant == VARIANT_ESP32: + rmii_pins = ESP32_RMII_FIXED_PINS + is_configurable = False + elif variant == VARIANT_ESP32P4: + rmii_pins = ESP32P4_RMII_DEFAULT_PINS + is_configurable = True + else: + return # No RMII validation needed for other variants + + # Check all used pins against RMII reserved pins + for pin_list in pins.PIN_SCHEMA_REGISTRY.pins_used.values(): + for pin_path, _, pin_config in pin_list: + pin_num = pin_config.get(CONF_NUMBER) + if pin_num not in rmii_pins: + continue + # Found a conflict - show helpful error message + pin_function = rmii_pins[pin_num] + component_path = ".".join(str(p) for p in pin_path) + if is_configurable: + error_msg = ( + f"GPIO{pin_num} is used by Ethernet RMII " + f"({pin_function}) with the current default " + f"configuration. This conflicts with '{component_path}'. " + f"Please choose a different GPIO pin for " + f"'{component_path}'." + ) + else: + error_msg = ( + f"GPIO{pin_num} is reserved for Ethernet RMII " + f"({pin_function}) and cannot be used. This pin is " + f"hardcoded by ESP-IDF and cannot be changed when using " + f"RMII Ethernet PHYs. Please choose a different GPIO pin " + f"for '{component_path}'." 
+ ) + raise cv.Invalid(error_msg, path=pin_path) + + +def _final_validate(config: ConfigType) -> ConfigType: + """Final validation for Ethernet component.""" + _final_validate_spi(config) + _final_validate_rmii_pins(config) + return config + + +FINAL_VALIDATE_SCHEMA = _final_validate diff --git a/esphome/components/event/event.cpp b/esphome/components/event/event.cpp index d27b3b378e..20549ad0a5 100644 --- a/esphome/components/event/event.cpp +++ b/esphome/components/event/event.cpp @@ -8,12 +8,19 @@ namespace event { static const char *const TAG = "event"; void Event::trigger(const std::string &event_type) { - auto found = types_.find(event_type); - if (found == types_.end()) { + // Linear search - faster than std::set for small datasets (1-5 items typical) + const std::string *found = nullptr; + for (const auto &type : this->types_) { + if (type == event_type) { + found = &type; + break; + } + } + if (found == nullptr) { ESP_LOGE(TAG, "'%s': invalid event type for trigger(): %s", this->get_name().c_str(), event_type.c_str()); return; } - last_event_type = &(*found); + last_event_type = found; ESP_LOGD(TAG, "'%s' Triggered event '%s'", this->get_name().c_str(), last_event_type->c_str()); this->event_callback_.call(event_type); } diff --git a/esphome/components/event/event.h b/esphome/components/event/event.h index a90c8ebe05..2f6267a200 100644 --- a/esphome/components/event/event.h +++ b/esphome/components/event/event.h @@ -1,6 +1,5 @@ #pragma once -#include <set> #include <string> #include "esphome/core/component.h" @@ -26,13 +25,13 @@ class Event : public EntityBase, public EntityBase_DeviceClass { const std::string *last_event_type; void trigger(const std::string &event_type); - void set_event_types(const std::set<std::string> &event_types) { this->types_ = event_types; } - std::set<std::string> get_event_types() const { return this->types_; } + void set_event_types(const std::initializer_list<std::string> &event_types) { this->types_ = event_types; } + const FixedVector<std::string> &get_event_types() const { return this->types_; } void add_on_event_callback(std::function<void(const std::string &)> &&callback); protected: CallbackManager<void(const std::string &)> event_callback_; - std::set<std::string> types_; + FixedVector<std::string> types_; }; } // namespace event diff --git a/esphome/components/fan/__init__.py b/esphome/components/fan/__init__.py index da8bf850c7..245c9f04b4 100644 --- a/esphome/components/fan/__init__.py +++ b/esphome/components/fan/__init__.py @@ -38,7 +38,6 @@ IS_PLATFORM_COMPONENT = True fan_ns = cg.esphome_ns.namespace("fan") Fan = fan_ns.class_("Fan", cg.EntityBase) -FanState = fan_ns.class_("Fan", Fan, cg.Component) FanDirection = fan_ns.enum("FanDirection", is_class=True) FAN_DIRECTION_ENUM = { diff --git a/esphome/components/fan/automation.h b/esphome/components/fan/automation.h index d480a2ef44..90661c307c 100644 --- a/esphome/components/fan/automation.h +++ b/esphome/components/fan/automation.h @@ -1,8 +1,8 @@ #pragma once -#include "esphome/core/component.h" #include "esphome/core/automation.h" -#include "fan_state.h" +#include "esphome/core/component.h" +#include "fan.h" namespace esphome { namespace fan { diff --git a/esphome/components/fan/fan.cpp b/esphome/components/fan/fan.cpp index 26065ed644..cf1ec3d6ae 100644 --- a/esphome/components/fan/fan.cpp +++ b/esphome/components/fan/fan.cpp @@ -51,7 +51,14 @@ void FanCall::validate_() { if (!this->preset_mode_.empty()) { const auto &preset_modes = traits.supported_preset_modes(); - if (preset_modes.find(this->preset_mode_) == preset_modes.end()) { + bool found = false; + for (const auto &mode : preset_modes) { + if (mode == this->preset_mode_) 
{ + found = true; + break; + } + } + if (!found) { ESP_LOGW(TAG, "%s: Preset mode '%s' not supported", this->parent_.get_name().c_str(), this->preset_mode_.c_str()); this->preset_mode_.clear(); } @@ -191,9 +198,14 @@ void Fan::save_state_() { if (this->get_traits().supports_preset_modes() && !this->preset_mode.empty()) { const auto &preset_modes = this->get_traits().supported_preset_modes(); // Store index of current preset mode - auto preset_iterator = preset_modes.find(this->preset_mode); - if (preset_iterator != preset_modes.end()) - state.preset_mode = std::distance(preset_modes.begin(), preset_iterator); + size_t i = 0; + for (const auto &mode : preset_modes) { + if (mode == this->preset_mode) { + state.preset_mode = i; + break; + } + i++; + } } this->rtc_.save(&state); diff --git a/esphome/components/fan/fan_state.cpp b/esphome/components/fan/fan_state.cpp deleted file mode 100644 index 7c1658fb2e..0000000000 --- a/esphome/components/fan/fan_state.cpp +++ /dev/null @@ -1,16 +0,0 @@ -#include "fan_state.h" - -namespace esphome { -namespace fan { - -static const char *const TAG = "fan"; - -void FanState::setup() { - auto restore = this->restore_state_(); - if (restore) - restore->to_call(*this).perform(); -} -float FanState::get_setup_priority() const { return setup_priority::DATA - 1.0f; } - -} // namespace fan -} // namespace esphome diff --git a/esphome/components/fan/fan_state.h b/esphome/components/fan/fan_state.h deleted file mode 100644 index 5926e700b0..0000000000 --- a/esphome/components/fan/fan_state.h +++ /dev/null @@ -1,34 +0,0 @@ -#pragma once - -#include "esphome/core/component.h" -#include "fan.h" - -namespace esphome { -namespace fan { - -enum ESPDEPRECATED("LegacyFanDirection members are deprecated, use FanDirection instead.", - "2022.2") LegacyFanDirection { - FAN_DIRECTION_FORWARD = 0, - FAN_DIRECTION_REVERSE = 1 -}; - -class ESPDEPRECATED("FanState is deprecated, use Fan instead.", "2022.2") FanState : public Fan, public Component { - public: - FanState() = default; - - /// Get the traits of this fan. - FanTraits get_traits() override { return this->traits_; } - /// Set the traits of this fan (i.e. what features it supports). - void set_traits(const FanTraits &traits) { this->traits_ = traits; } - - void setup() override; - float get_setup_priority() const override; - - protected: - void control(const FanCall &call) override { this->publish_state(); } - - FanTraits traits_{}; -}; - -} // namespace fan -} // namespace esphome diff --git a/esphome/components/fan/fan_traits.h b/esphome/components/fan/fan_traits.h index 48509e5705..15c951b045 100644 --- a/esphome/components/fan/fan_traits.h +++ b/esphome/components/fan/fan_traits.h @@ -1,8 +1,7 @@ -#include -#include - #pragma once +#include + namespace esphome { #ifdef USE_API @@ -36,9 +35,9 @@ class FanTraits { /// Set whether this fan supports changing direction void set_direction(bool direction) { this->direction_ = direction; } /// Return the preset modes supported by the fan. - std::set supported_preset_modes() const { return this->preset_modes_; } + const std::vector &supported_preset_modes() const { return this->preset_modes_; } /// Set the preset modes supported by the fan. 
- void set_supported_preset_modes(const std::set<std::string> &preset_modes) { this->preset_modes_ = preset_modes; } + void set_supported_preset_modes(const std::vector<std::string> &preset_modes) { this->preset_modes_ = preset_modes; } /// Return if preset modes are supported bool supports_preset_modes() const { return !this->preset_modes_.empty(); } @@ -46,17 +45,17 @@ class FanTraits { #ifdef USE_API // The API connection is a friend class to access internal methods friend class api::APIConnection; - // This method returns a reference to the internal preset modes set. + // This method returns a reference to the internal preset modes. // It is used by the API to avoid copying data when encoding messages. // Warning: Do not use this method outside of the API connection code. // It returns a reference to internal data that can be invalidated. - const std::set<std::string> &supported_preset_modes_for_api_() const { return this->preset_modes_; } + const std::vector<std::string> &supported_preset_modes_for_api_() const { return this->preset_modes_; } #endif bool oscillation_{false}; bool speed_{false}; bool direction_{false}; int speed_count_{}; - std::set<std::string> preset_modes_{}; + std::vector<std::string> preset_modes_{}; }; } // namespace fan diff --git a/esphome/components/gpio/switch/gpio_switch.cpp b/esphome/components/gpio/switch/gpio_switch.cpp index b67af5e95d..9043a6a493 100644 --- a/esphome/components/gpio/switch/gpio_switch.cpp +++ b/esphome/components/gpio/switch/gpio_switch.cpp @@ -67,7 +67,7 @@ void GPIOSwitch::write_state(bool state) { this->pin_->digital_write(state); this->publish_state(state); } -void GPIOSwitch::set_interlock(const std::vector<Switch *> &interlock) { this->interlock_ = interlock; } +void GPIOSwitch::set_interlock(const std::initializer_list<Switch *> &interlock) { this->interlock_ = interlock; } } // namespace gpio } // namespace esphome diff --git a/esphome/components/gpio/switch/gpio_switch.h b/esphome/components/gpio/switch/gpio_switch.h index 94d49745b5..080decac08 100644 --- a/esphome/components/gpio/switch/gpio_switch.h +++ b/esphome/components/gpio/switch/gpio_switch.h @@ -2,10 +2,9 @@ #include "esphome/core/component.h" #include "esphome/core/hal.h" +#include "esphome/core/helpers.h" #include "esphome/components/switch/switch.h" -#include <vector> - namespace esphome { namespace gpio { @@ -19,14 +18,14 @@ class GPIOSwitch : public switch_::Switch, public Component { void setup() override; void dump_config() override; - void set_interlock(const std::vector<Switch *> &interlock); + void set_interlock(const std::initializer_list<Switch *> &interlock); void set_interlock_wait_time(uint32_t interlock_wait_time) { interlock_wait_time_ = interlock_wait_time; } protected: void write_state(bool state) override; GPIOPin *pin_; - std::vector<Switch *> interlock_; + FixedVector<Switch *> interlock_; uint32_t interlock_wait_time_{0}; }; diff --git a/esphome/components/haier/haier_base.cpp b/esphome/components/haier/haier_base.cpp index 55a2454fca..5709b8e9b5 100644 --- a/esphome/components/haier/haier_base.cpp +++ b/esphome/components/haier/haier_base.cpp @@ -65,7 +65,7 @@ HaierClimateBase::HaierClimateBase() {climate::CLIMATE_FAN_AUTO, climate::CLIMATE_FAN_LOW, climate::CLIMATE_FAN_MEDIUM, climate::CLIMATE_FAN_HIGH}); this->traits_.set_supported_swing_modes({climate::CLIMATE_SWING_OFF, climate::CLIMATE_SWING_BOTH, climate::CLIMATE_SWING_VERTICAL, climate::CLIMATE_SWING_HORIZONTAL}); - this->traits_.set_supports_current_temperature(true); + this->traits_.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); } HaierClimateBase::~HaierClimateBase() {} diff --git 
a/esphome/components/hbridge/fan/hbridge_fan.h b/esphome/components/hbridge/fan/hbridge_fan.h index 4234fccae3..b5fb7f5daa 100644 --- a/esphome/components/hbridge/fan/hbridge_fan.h +++ b/esphome/components/hbridge/fan/hbridge_fan.h @@ -22,7 +22,7 @@ class HBridgeFan : public Component, public fan::Fan { void set_pin_a(output::FloatOutput *pin_a) { pin_a_ = pin_a; } void set_pin_b(output::FloatOutput *pin_b) { pin_b_ = pin_b; } void set_enable_pin(output::FloatOutput *enable) { enable_ = enable; } - void set_preset_modes(const std::set &presets) { preset_modes_ = presets; } + void set_preset_modes(const std::vector &presets) { preset_modes_ = presets; } void setup() override; void dump_config() override; @@ -38,7 +38,7 @@ class HBridgeFan : public Component, public fan::Fan { int speed_count_{}; DecayMode decay_mode_{DECAY_MODE_SLOW}; fan::FanTraits traits_; - std::set preset_modes_{}; + std::vector preset_modes_{}; void control(const fan::FanCall &call) override; void write_state_(); diff --git a/esphome/components/hdc1080/hdc1080.cpp b/esphome/components/hdc1080/hdc1080.cpp index 71b7cd7e6e..fa293f6fc5 100644 --- a/esphome/components/hdc1080/hdc1080.cpp +++ b/esphome/components/hdc1080/hdc1080.cpp @@ -16,7 +16,8 @@ void HDC1080Component::setup() { // if configuration fails - there is a problem if (this->write_register(HDC1080_CMD_CONFIGURATION, config, 2) != i2c::ERROR_OK) { - this->mark_failed(); + ESP_LOGW(TAG, "Failed to configure HDC1080"); + this->status_set_warning(); return; } } diff --git a/esphome/components/improv_base/__init__.py b/esphome/components/improv_base/__init__.py index aa75f4d89c..e175aa2220 100644 --- a/esphome/components/improv_base/__init__.py +++ b/esphome/components/improv_base/__init__.py @@ -3,6 +3,8 @@ import re import esphome.codegen as cg import esphome.config_validation as cv from esphome.const import __version__ +from esphome.cpp_generator import MockObj +from esphome.types import ConfigType CODEOWNERS = ["@esphome/core"] @@ -35,7 +37,9 @@ def _process_next_url(url: str): return url -async def setup_improv_core(var, config): - if CONF_NEXT_URL in config: - cg.add(var.set_next_url(_process_next_url(config[CONF_NEXT_URL]))) +async def setup_improv_core(var: MockObj, config: ConfigType, component: str): + if next_url := config.get(CONF_NEXT_URL): + cg.add(var.set_next_url(_process_next_url(next_url))) + cg.add_define(f"USE_{component.upper()}_NEXT_URL") + cg.add_library("improv/Improv", "1.2.4") diff --git a/esphome/components/improv_base/improv_base.cpp b/esphome/components/improv_base/improv_base.cpp index e890187d1a..2091390f95 100644 --- a/esphome/components/improv_base/improv_base.cpp +++ b/esphome/components/improv_base/improv_base.cpp @@ -2,36 +2,50 @@ #include "esphome/components/network/util.h" #include "esphome/core/application.h" +#include "esphome/core/defines.h" namespace esphome { namespace improv_base { +#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL) +static constexpr const char DEVICE_NAME_PLACEHOLDER[] = "{{device_name}}"; +static constexpr size_t DEVICE_NAME_PLACEHOLDER_LEN = sizeof(DEVICE_NAME_PLACEHOLDER) - 1; +static constexpr const char IP_ADDRESS_PLACEHOLDER[] = "{{ip_address}}"; +static constexpr size_t IP_ADDRESS_PLACEHOLDER_LEN = sizeof(IP_ADDRESS_PLACEHOLDER) - 1; + +static void replace_all_in_place(std::string &str, const char *placeholder, size_t placeholder_len, + const std::string &replacement) { + size_t pos = 0; + const size_t replacement_len = replacement.length(); + while ((pos = 
str.find(placeholder, pos)) != std::string::npos) { + str.replace(pos, placeholder_len, replacement); + pos += replacement_len; + } +} + std::string ImprovBase::get_formatted_next_url_() { if (this->next_url_.empty()) { return ""; } - std::string copy = this->next_url_; - // Device name - std::size_t pos = this->next_url_.find("{{device_name}}"); - if (pos != std::string::npos) { - const std::string &device_name = App.get_name(); - copy.replace(pos, 15, device_name); - } - // Ip address - pos = this->next_url_.find("{{ip_address}}"); - if (pos != std::string::npos) { - for (auto &ip : network::get_ip_addresses()) { - if (ip.is_ip4()) { - std::string ipa = ip.str(); - copy.replace(pos, 14, ipa); - break; - } + std::string formatted_url = this->next_url_; + + // Replace all occurrences of {{device_name}} + replace_all_in_place(formatted_url, DEVICE_NAME_PLACEHOLDER, DEVICE_NAME_PLACEHOLDER_LEN, App.get_name()); + + // Replace all occurrences of {{ip_address}} + for (auto &ip : network::get_ip_addresses()) { + if (ip.is_ip4()) { + replace_all_in_place(formatted_url, IP_ADDRESS_PLACEHOLDER, IP_ADDRESS_PLACEHOLDER_LEN, ip.str()); + break; } } - return copy; + // Note: {{esphome_version}} is replaced at code generation time in Python + + return formatted_url; } +#endif } // namespace improv_base } // namespace esphome diff --git a/esphome/components/improv_base/improv_base.h b/esphome/components/improv_base/improv_base.h index 90cd02a4ab..e4138479df 100644 --- a/esphome/components/improv_base/improv_base.h +++ b/esphome/components/improv_base/improv_base.h @@ -1,17 +1,22 @@ #pragma once #include +#include "esphome/core/defines.h" namespace esphome { namespace improv_base { class ImprovBase { public: +#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL) void set_next_url(const std::string &next_url) { this->next_url_ = next_url; } +#endif protected: +#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL) std::string get_formatted_next_url_(); std::string next_url_; +#endif }; } // namespace improv_base diff --git a/esphome/components/improv_serial/__init__.py b/esphome/components/improv_serial/__init__.py index 568b200a85..fb2b541707 100644 --- a/esphome/components/improv_serial/__init__.py +++ b/esphome/components/improv_serial/__init__.py @@ -43,4 +43,4 @@ FINAL_VALIDATE_SCHEMA = validate_logger async def to_code(config): var = cg.new_Pvariable(config[CONF_ID]) await cg.register_component(var, config) - await improv_base.setup_improv_core(var, config) + await improv_base.setup_improv_core(var, config, "improv_serial") diff --git a/esphome/components/improv_serial/improv_serial_component.cpp b/esphome/components/improv_serial/improv_serial_component.cpp index 28245dcfdf..ce82504d3c 100644 --- a/esphome/components/improv_serial/improv_serial_component.cpp +++ b/esphome/components/improv_serial/improv_serial_component.cpp @@ -146,9 +146,11 @@ void ImprovSerialComponent::loop() { std::vector ImprovSerialComponent::build_rpc_settings_response_(improv::Command command) { std::vector urls; +#ifdef USE_IMPROV_SERIAL_NEXT_URL if (!this->next_url_.empty()) { urls.push_back(this->get_formatted_next_url_()); } +#endif #ifdef USE_WEBSERVER for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) { if (ip.is_ip4()) { diff --git a/esphome/components/kuntze/kuntze.cpp b/esphome/components/kuntze/kuntze.cpp index 42545d9d54..30f98aaa99 100644 --- a/esphome/components/kuntze/kuntze.cpp +++ b/esphome/components/kuntze/kuntze.cpp @@ -14,7 +14,7 @@ void 
Kuntze::on_modbus_data(const std::vector &data) { auto get_16bit = [&](int i) -> uint16_t { return (uint16_t(data[i * 2]) << 8) | uint16_t(data[i * 2 + 1]); }; this->waiting_ = false; - ESP_LOGV(TAG, "Data: %s", hexencode(data).c_str()); + ESP_LOGV(TAG, "Data: %s", format_hex_pretty(data).c_str()); float value = (float) get_16bit(0); for (int i = 0; i < data[3]; i++) diff --git a/esphome/components/light/addressable_light.cpp b/esphome/components/light/addressable_light.cpp index a8e0c7b762..5cbdcb0e86 100644 --- a/esphome/components/light/addressable_light.cpp +++ b/esphome/components/light/addressable_light.cpp @@ -61,8 +61,12 @@ void AddressableLightTransformer::start() { this->target_color_ *= to_uint8_scale(end_values.get_brightness() * end_values.get_state()); } +inline constexpr uint8_t subtract_scaled_difference(uint8_t a, uint8_t b, int32_t scale) { + return uint8_t(int32_t(a) - (((int32_t(a) - int32_t(b)) * scale) / 256)); +} + optional AddressableLightTransformer::apply() { - float smoothed_progress = LightTransitionTransformer::smoothed_progress(this->get_progress_()); + float smoothed_progress = LightTransformer::smoothed_progress(this->get_progress_()); // When running an output-buffer modifying effect, don't try to transition individual LEDs, but instead just fade the // LightColorValues. write_state() then picks up the change in brightness, and the color change is picked up by the @@ -74,38 +78,37 @@ optional AddressableLightTransformer::apply() { // all LEDs, we use the current state of each LED as the start. // We can't use a direct lerp smoothing here though - that would require creating a copy of the original - // state of each LED at the start of the transition. - // Instead, we "fake" the look of the LERP by using an exponential average over time and using - // dynamically-calculated alpha values to match the look. + // state of each LED at the start of the transition. Instead, we "fake" the look of lerp by calculating + // the delta between the current state and the target state, assuming that the delta represents the rest + // of the transition that was to be applied as of the previous transition step, and scaling the delta for + // what should be left after the current transition step. In this manner, the delta decays to zero as the + // transition progresses. + // + // Here's an example of how the algorithm progresses in discrete steps: + // + // At time = 0.00, 0% complete, 100% remaining, 100% will remain after this step, so the scale is 100% / 100% = 100%. + // At time = 0.10, 0% complete, 100% remaining, 90% will remain after this step, so the scale is 90% / 100% = 90%. + // At time = 0.20, 10% complete, 90% remaining, 80% will remain after this step, so the scale is 80% / 90% = 88.9%. + // At time = 0.50, 20% complete, 80% remaining, 50% will remain after this step, so the scale is 50% / 80% = 62.5%. + // At time = 0.90, 50% complete, 50% remaining, 10% will remain after this step, so the scale is 10% / 50% = 20%. + // At time = 0.91, 90% complete, 10% remaining, 9% will remain after this step, so the scale is 9% / 10% = 90%. + // At time = 1.00, 91% complete, 9% remaining, 0% will remain after this step, so the scale is 0% / 9% = 0%. + // + // Because the color values are quantized to 8 bit resolution after each step, the transition may appear + // non-linear when applying small deltas. - float denom = (1.0f - smoothed_progress); - float alpha = denom == 0.0f ? 
1.0f : (smoothed_progress - this->last_transition_progress_) / denom; - - // We need to use a low-resolution alpha here which makes the transition set in only after ~half of the length - // We solve this by accumulating the fractional part of the alpha over time. - float alpha255 = alpha * 255.0f; - float alpha255int = floorf(alpha255); - float alpha255remainder = alpha255 - alpha255int; - - this->accumulated_alpha_ += alpha255remainder; - float alpha_add = floorf(this->accumulated_alpha_); - this->accumulated_alpha_ -= alpha_add; - - alpha255 += alpha_add; - alpha255 = clamp(alpha255, 0.0f, 255.0f); - auto alpha8 = static_cast(alpha255); - - if (alpha8 != 0) { - uint8_t inv_alpha8 = 255 - alpha8; - Color add = this->target_color_ * alpha8; - - for (auto led : this->light_) - led.set(add + led.get() * inv_alpha8); + if (smoothed_progress > this->last_transition_progress_ && this->last_transition_progress_ < 1.f) { + int32_t scale = int32_t(256.f * std::max((1.f - smoothed_progress) / (1.f - this->last_transition_progress_), 0.f)); + for (auto led : this->light_) { + led.set_rgbw(subtract_scaled_difference(this->target_color_.red, led.get_red(), scale), + subtract_scaled_difference(this->target_color_.green, led.get_green(), scale), + subtract_scaled_difference(this->target_color_.blue, led.get_blue(), scale), + subtract_scaled_difference(this->target_color_.white, led.get_white(), scale)); + } + this->last_transition_progress_ = smoothed_progress; + this->light_.schedule_show(); } - this->last_transition_progress_ = smoothed_progress; - this->light_.schedule_show(); - return {}; } diff --git a/esphome/components/light/addressable_light.h b/esphome/components/light/addressable_light.h index baa4507d2f..393cc679bc 100644 --- a/esphome/components/light/addressable_light.h +++ b/esphome/components/light/addressable_light.h @@ -1,14 +1,14 @@ #pragma once -#include "esphome/core/component.h" -#include "esphome/core/defines.h" -#include "esphome/core/color.h" #include "esp_color_correction.h" #include "esp_color_view.h" #include "esp_range_view.h" +#include "esphome/core/color.h" +#include "esphome/core/component.h" +#include "esphome/core/defines.h" #include "light_output.h" #include "light_state.h" -#include "transformers.h" +#include "light_transformer.h" #ifdef USE_POWER_SUPPLY #include "esphome/components/power_supply/power_supply.h" @@ -17,8 +17,6 @@ namespace esphome { namespace light { -using ESPColor ESPDEPRECATED("esphome::light::ESPColor is deprecated, use esphome::Color instead.", "v1.21") = Color; - /// Convert the color information from a `LightColorValues` object to a `Color` object (does not apply brightness). 
Color color_from_light_color_values(LightColorValues val); @@ -105,7 +103,7 @@ class AddressableLight : public LightOutput, public Component { bool effect_active_{false}; }; -class AddressableLightTransformer : public LightTransitionTransformer { +class AddressableLightTransformer : public LightTransformer { public: AddressableLightTransformer(AddressableLight &light) : light_(light) {} @@ -115,7 +113,6 @@ class AddressableLightTransformer : public LightTransitionTransformer { protected: AddressableLight &light_; float last_transition_progress_{0.0f}; - float accumulated_alpha_{0.0f}; Color target_color_{}; }; diff --git a/esphome/components/light/addressable_light_effect.h b/esphome/components/light/addressable_light_effect.h index fcf76b3cb0..9840112040 100644 --- a/esphome/components/light/addressable_light_effect.h +++ b/esphome/components/light/addressable_light_effect.h @@ -1,9 +1,9 @@ #pragma once #include -#include #include "esphome/core/component.h" +#include "esphome/core/helpers.h" #include "esphome/components/light/light_state.h" #include "esphome/components/light/addressable_light.h" @@ -30,7 +30,7 @@ inline static uint8_t half_sin8(uint8_t v) { return sin16_c(uint16_t(v) * 128u) class AddressableLightEffect : public LightEffect { public: - explicit AddressableLightEffect(const std::string &name) : LightEffect(name) {} + explicit AddressableLightEffect(const char *name) : LightEffect(name) {} void start_internal() override { this->get_addressable_()->set_effect_active(true); this->get_addressable_()->clear_effect_data(); @@ -57,8 +57,7 @@ class AddressableLightEffect : public LightEffect { class AddressableLambdaLightEffect : public AddressableLightEffect { public: - AddressableLambdaLightEffect(const std::string &name, - std::function f, + AddressableLambdaLightEffect(const char *name, std::function f, uint32_t update_interval) : AddressableLightEffect(name), f_(std::move(f)), update_interval_(update_interval) {} void start() override { this->initial_run_ = true; } @@ -81,7 +80,7 @@ class AddressableLambdaLightEffect : public AddressableLightEffect { class AddressableRainbowLightEffect : public AddressableLightEffect { public: - explicit AddressableRainbowLightEffect(const std::string &name) : AddressableLightEffect(name) {} + explicit AddressableRainbowLightEffect(const char *name) : AddressableLightEffect(name) {} void apply(AddressableLight &it, const Color ¤t_color) override { ESPHSVColor hsv; hsv.value = 255; @@ -112,8 +111,8 @@ struct AddressableColorWipeEffectColor { class AddressableColorWipeEffect : public AddressableLightEffect { public: - explicit AddressableColorWipeEffect(const std::string &name) : AddressableLightEffect(name) {} - void set_colors(const std::vector &colors) { this->colors_ = colors; } + explicit AddressableColorWipeEffect(const char *name) : AddressableLightEffect(name) {} + void set_colors(const std::initializer_list &colors) { this->colors_ = colors; } void set_add_led_interval(uint32_t add_led_interval) { this->add_led_interval_ = add_led_interval; } void set_reverse(bool reverse) { this->reverse_ = reverse; } void apply(AddressableLight &it, const Color ¤t_color) override { @@ -155,7 +154,7 @@ class AddressableColorWipeEffect : public AddressableLightEffect { } protected: - std::vector colors_; + FixedVector colors_; size_t at_color_{0}; uint32_t last_add_{0}; uint32_t add_led_interval_{}; @@ -165,7 +164,7 @@ class AddressableColorWipeEffect : public AddressableLightEffect { class AddressableScanEffect : public AddressableLightEffect { 
public: - explicit AddressableScanEffect(const std::string &name) : AddressableLightEffect(name) {} + explicit AddressableScanEffect(const char *name) : AddressableLightEffect(name) {} void set_move_interval(uint32_t move_interval) { this->move_interval_ = move_interval; } void set_scan_width(uint32_t scan_width) { this->scan_width_ = scan_width; } void apply(AddressableLight &it, const Color ¤t_color) override { @@ -202,7 +201,7 @@ class AddressableScanEffect : public AddressableLightEffect { class AddressableTwinkleEffect : public AddressableLightEffect { public: - explicit AddressableTwinkleEffect(const std::string &name) : AddressableLightEffect(name) {} + explicit AddressableTwinkleEffect(const char *name) : AddressableLightEffect(name) {} void apply(AddressableLight &addressable, const Color ¤t_color) override { const uint32_t now = millis(); uint8_t pos_add = 0; @@ -244,7 +243,7 @@ class AddressableTwinkleEffect : public AddressableLightEffect { class AddressableRandomTwinkleEffect : public AddressableLightEffect { public: - explicit AddressableRandomTwinkleEffect(const std::string &name) : AddressableLightEffect(name) {} + explicit AddressableRandomTwinkleEffect(const char *name) : AddressableLightEffect(name) {} void apply(AddressableLight &it, const Color ¤t_color) override { const uint32_t now = millis(); uint8_t pos_add = 0; @@ -293,7 +292,7 @@ class AddressableRandomTwinkleEffect : public AddressableLightEffect { class AddressableFireworksEffect : public AddressableLightEffect { public: - explicit AddressableFireworksEffect(const std::string &name) : AddressableLightEffect(name) {} + explicit AddressableFireworksEffect(const char *name) : AddressableLightEffect(name) {} void start() override { auto &it = *this->get_addressable_(); it.all() = Color::BLACK; @@ -342,7 +341,7 @@ class AddressableFireworksEffect : public AddressableLightEffect { class AddressableFlickerEffect : public AddressableLightEffect { public: - explicit AddressableFlickerEffect(const std::string &name) : AddressableLightEffect(name) {} + explicit AddressableFlickerEffect(const char *name) : AddressableLightEffect(name) {} void apply(AddressableLight &it, const Color ¤t_color) override { const uint32_t now = millis(); const uint8_t intensity = this->intensity_; diff --git a/esphome/components/light/base_light_effects.h b/esphome/components/light/base_light_effects.h index ff6cd1ccfe..327c243525 100644 --- a/esphome/components/light/base_light_effects.h +++ b/esphome/components/light/base_light_effects.h @@ -1,9 +1,9 @@ #pragma once #include -#include #include "esphome/core/automation.h" +#include "esphome/core/helpers.h" #include "light_effect.h" namespace esphome { @@ -17,7 +17,7 @@ inline static float random_cubic_float() { /// Pulse effect. class PulseLightEffect : public LightEffect { public: - explicit PulseLightEffect(const std::string &name) : LightEffect(name) {} + explicit PulseLightEffect(const char *name) : LightEffect(name) {} void apply() override { const uint32_t now = millis(); @@ -60,7 +60,7 @@ class PulseLightEffect : public LightEffect { /// Random effect. Sets random colors every 10 seconds and slowly transitions between them. 
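[Editor's note] The effects above (and StrobeLightEffect further below) move their setters from const std::vector references to std::initializer_list and store the values in a FixedVector, since the lists come from Python codegen and never change after setup. The real FixedVector is declared in esphome/core/helpers.h and is not shown in this patch; the stand-in below is only a sketch of the pattern being relied on, under the assumption that a container which allocates exactly once and never grows is sufficient.

// Illustrative stand-in only -- NOT the real esphome FixedVector. It shows the shape of the
// pattern: assign once from an initializer_list, no growth or reallocation afterwards.
#include <cstddef>
#include <initializer_list>
#include <memory>

template<typename T> class FixedVectorSketch {
 public:
  FixedVectorSketch &operator=(std::initializer_list<T> init) {
    this->size_ = init.size();
    this->data_ = std::make_unique<T[]>(this->size_);
    std::size_t i = 0;
    for (const T &v : init)
      this->data_[i++] = v;
    return *this;
  }
  std::size_t size() const { return this->size_; }
  const T &operator[](std::size_t i) const { return this->data_[i]; }

 private:
  std::unique_ptr<T[]> data_;
  std::size_t size_{0};
};

int main() {
  FixedVectorSketch<int> colors;
  colors = {0xFF0000, 0x00FF00, 0x0000FF};  // one allocation, sized exactly to the list
  return colors.size() == 3 ? 0 : 1;
}
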
class RandomLightEffect : public LightEffect { public: - explicit RandomLightEffect(const std::string &name) : LightEffect(name) {} + explicit RandomLightEffect(const char *name) : LightEffect(name) {} void apply() override { const uint32_t now = millis(); @@ -112,7 +112,7 @@ class RandomLightEffect : public LightEffect { class LambdaLightEffect : public LightEffect { public: - LambdaLightEffect(const std::string &name, std::function f, uint32_t update_interval) + LambdaLightEffect(const char *name, std::function f, uint32_t update_interval) : LightEffect(name), f_(std::move(f)), update_interval_(update_interval) {} void start() override { this->initial_run_ = true; } @@ -138,7 +138,7 @@ class LambdaLightEffect : public LightEffect { class AutomationLightEffect : public LightEffect { public: - AutomationLightEffect(const std::string &name) : LightEffect(name) {} + AutomationLightEffect(const char *name) : LightEffect(name) {} void stop() override { this->trig_->stop_action(); } void apply() override { if (!this->trig_->is_action_running()) { @@ -163,7 +163,7 @@ struct StrobeLightEffectColor { class StrobeLightEffect : public LightEffect { public: - explicit StrobeLightEffect(const std::string &name) : LightEffect(name) {} + explicit StrobeLightEffect(const char *name) : LightEffect(name) {} void apply() override { const uint32_t now = millis(); if (now - this->last_switch_ < this->colors_[this->at_color_].duration) @@ -188,17 +188,17 @@ class StrobeLightEffect : public LightEffect { this->last_switch_ = now; } - void set_colors(const std::vector &colors) { this->colors_ = colors; } + void set_colors(const std::initializer_list &colors) { this->colors_ = colors; } protected: - std::vector colors_; + FixedVector colors_; uint32_t last_switch_{0}; size_t at_color_{0}; }; class FlickerLightEffect : public LightEffect { public: - explicit FlickerLightEffect(const std::string &name) : LightEffect(name) {} + explicit FlickerLightEffect(const char *name) : LightEffect(name) {} void apply() override { LightColorValues remote = this->state_->remote_values; diff --git a/esphome/components/light/color_mode.h b/esphome/components/light/color_mode.h index e524763c9f..aa3448c145 100644 --- a/esphome/components/light/color_mode.h +++ b/esphome/components/light/color_mode.h @@ -1,6 +1,7 @@ #pragma once #include +#include "esphome/core/finite_set_mask.h" namespace esphome { namespace light { @@ -104,5 +105,110 @@ constexpr ColorModeHelper operator|(ColorModeHelper lhs, ColorMode rhs) { return static_cast(static_cast(lhs) | static_cast(rhs)); } +// Type alias for raw color mode bitmask values +using color_mode_bitmask_t = uint16_t; + +// Lookup table for ColorMode bit mapping +// This array defines the canonical order of color modes (bit 0-9) +constexpr ColorMode COLOR_MODE_LOOKUP[] = { + ColorMode::UNKNOWN, // bit 0 + ColorMode::ON_OFF, // bit 1 + ColorMode::BRIGHTNESS, // bit 2 + ColorMode::WHITE, // bit 3 + ColorMode::COLOR_TEMPERATURE, // bit 4 + ColorMode::COLD_WARM_WHITE, // bit 5 + ColorMode::RGB, // bit 6 + ColorMode::RGB_WHITE, // bit 7 + ColorMode::RGB_COLOR_TEMPERATURE, // bit 8 + ColorMode::RGB_COLD_WARM_WHITE, // bit 9 +}; + +/// Bit mapping policy for ColorMode +/// Uses lookup table for non-contiguous enum values +struct ColorModeBitPolicy { + using mask_t = uint16_t; // 10 bits requires uint16_t + static constexpr int MAX_BITS = sizeof(COLOR_MODE_LOOKUP) / sizeof(COLOR_MODE_LOOKUP[0]); + + static constexpr unsigned to_bit(ColorMode mode) { + // Linear search through lookup table + // Compiler 
optimizes this to efficient code since array is constexpr + for (int i = 0; i < MAX_BITS; ++i) { + if (COLOR_MODE_LOOKUP[i] == mode) + return i; + } + return 0; + } + + static constexpr ColorMode from_bit(unsigned bit) { + return (bit < MAX_BITS) ? COLOR_MODE_LOOKUP[bit] : ColorMode::UNKNOWN; + } +}; + +// Type alias for ColorMode bitmask using policy-based design +using ColorModeMask = FiniteSetMask; + +// Number of ColorCapability enum values +constexpr int COLOR_CAPABILITY_COUNT = 6; + +/// Helper to compute capability bitmask at compile time +constexpr uint16_t compute_capability_bitmask(ColorCapability capability) { + uint16_t mask = 0; + uint8_t cap_bit = static_cast(capability); + + // Check each ColorMode to see if it has this capability + constexpr int color_mode_count = sizeof(COLOR_MODE_LOOKUP) / sizeof(COLOR_MODE_LOOKUP[0]); + for (int bit = 0; bit < color_mode_count; ++bit) { + uint8_t mode_val = static_cast(COLOR_MODE_LOOKUP[bit]); + if ((mode_val & cap_bit) != 0) { + mask |= (1 << bit); + } + } + return mask; +} + +/// Compile-time lookup table mapping ColorCapability to bitmask +/// This array is computed at compile time using constexpr +constexpr uint16_t CAPABILITY_BITMASKS[] = { + compute_capability_bitmask(ColorCapability::ON_OFF), // 1 << 0 + compute_capability_bitmask(ColorCapability::BRIGHTNESS), // 1 << 1 + compute_capability_bitmask(ColorCapability::WHITE), // 1 << 2 + compute_capability_bitmask(ColorCapability::COLOR_TEMPERATURE), // 1 << 3 + compute_capability_bitmask(ColorCapability::COLD_WARM_WHITE), // 1 << 4 + compute_capability_bitmask(ColorCapability::RGB), // 1 << 5 +}; + +/** + * @brief Helper function to convert a power-of-2 ColorCapability value to an array index for CAPABILITY_BITMASKS + * lookup. + * + * This function maps ColorCapability values (1, 2, 4, 8, 16, 32) to array indices (0, 1, 2, 3, 4, 5). + * Used to index into the CAPABILITY_BITMASKS lookup table. + * + * @param capability A ColorCapability enum value (must be a power of 2). + * @return The corresponding array index (0-based). 
+ */ +inline int capability_to_index(ColorCapability capability) { + uint8_t cap_val = static_cast(capability); +#if defined(__GNUC__) || defined(__clang__) + // Use compiler intrinsic for efficient bit position lookup (O(1) vs O(log n)) + return __builtin_ctz(cap_val); +#else + // Fallback for compilers without __builtin_ctz + int index = 0; + while (cap_val > 1) { + cap_val >>= 1; + ++index; + } + return index; +#endif +} + +/// Check if any mode in the bitmask has a specific capability +/// Used for checking if a light supports a capability (e.g., BRIGHTNESS, RGB) +inline bool has_capability(const ColorModeMask &mask, ColorCapability capability) { + // Lookup the pre-computed bitmask for this capability and check intersection with our mask + return (mask.get_mask() & CAPABILITY_BITMASKS[capability_to_index(capability)]) != 0; +} + } // namespace light } // namespace esphome diff --git a/esphome/components/light/esp_color_correction.h b/esphome/components/light/esp_color_correction.h index 979a1acb07..14c065058c 100644 --- a/esphome/components/light/esp_color_correction.h +++ b/esphome/components/light/esp_color_correction.h @@ -17,19 +17,19 @@ class ESPColorCorrection { this->color_correct_blue(color.blue), this->color_correct_white(color.white)); } inline uint8_t color_correct_red(uint8_t red) const ESPHOME_ALWAYS_INLINE { - uint8_t res = esp_scale8(esp_scale8(red, this->max_brightness_.red), this->local_brightness_); + uint8_t res = esp_scale8_twice(red, this->max_brightness_.red, this->local_brightness_); return this->gamma_table_[res]; } inline uint8_t color_correct_green(uint8_t green) const ESPHOME_ALWAYS_INLINE { - uint8_t res = esp_scale8(esp_scale8(green, this->max_brightness_.green), this->local_brightness_); + uint8_t res = esp_scale8_twice(green, this->max_brightness_.green, this->local_brightness_); return this->gamma_table_[res]; } inline uint8_t color_correct_blue(uint8_t blue) const ESPHOME_ALWAYS_INLINE { - uint8_t res = esp_scale8(esp_scale8(blue, this->max_brightness_.blue), this->local_brightness_); + uint8_t res = esp_scale8_twice(blue, this->max_brightness_.blue, this->local_brightness_); return this->gamma_table_[res]; } inline uint8_t color_correct_white(uint8_t white) const ESPHOME_ALWAYS_INLINE { - uint8_t res = esp_scale8(esp_scale8(white, this->max_brightness_.white), this->local_brightness_); + uint8_t res = esp_scale8_twice(white, this->max_brightness_.white, this->local_brightness_); return this->gamma_table_[res]; } inline Color color_uncorrect(Color color) const ESPHOME_ALWAYS_INLINE { diff --git a/esphome/components/light/light_call.cpp b/esphome/components/light/light_call.cpp index 915b8fdf89..df17f53adc 100644 --- a/esphome/components/light/light_call.cpp +++ b/esphome/components/light/light_call.cpp @@ -156,7 +156,7 @@ void LightCall::perform() { if (this->effect_ == 0u) { effect_s = "None"; } else { - effect_s = this->parent_->effects_[this->effect_ - 1]->get_name().c_str(); + effect_s = this->parent_->effects_[this->effect_ - 1]->get_name(); } if (publish) { @@ -406,7 +406,7 @@ void LightCall::transform_parameters_() { } } ColorMode LightCall::compute_color_mode_() { - auto supported_modes = this->parent_->get_traits().get_supported_color_modes(); + const auto &supported_modes = this->parent_->get_traits().get_supported_color_modes(); int supported_count = supported_modes.size(); // Some lights don't support any color modes (e.g. monochromatic light), leave it at unknown. 
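[Editor's note] The light_call.cpp hunk that follows replaces the std::set intersection in compute_color_mode_() with a single bitwise AND over color_mode_bitmask_t. The standalone sketch below shows that arithmetic with plain integers, using the bit positions from COLOR_MODE_LOOKUP above; the real ColorModeMask/FiniteSetMask API is not reproduced here, and treating first_value_from_mask() as "lowest set bit" is an assumption.

// Illustrative sketch only -- plain integers standing in for ColorModeMask. Bit positions
// follow COLOR_MODE_LOOKUP above: COLOR_TEMPERATURE = bit 4, COLD_WARM_WHITE = bit 5,
// RGB = bit 6, RGB_COLOR_TEMPERATURE = bit 8, RGB_COLD_WARM_WHITE = bit 9.
#include <cstdint>
#include <cstdio>

using color_mode_bitmask_t = uint16_t;

int main() {
  // Hypothetical light that supports COLOR_TEMPERATURE and RGB.
  color_mode_bitmask_t supported = (1u << 4) | (1u << 6);
  // Suitable modes for a call that only sets color temperature (the "ct only" case below).
  color_mode_bitmask_t suitable = (1u << 4) | (1u << 5) | (1u << 8) | (1u << 9);
  // One AND replaces the old std::set loops.
  color_mode_bitmask_t intersection = suitable & supported;
  if (intersection != 0) {
    // Assuming the preferred mode is the lowest set bit, the GCC/Clang builtin __builtin_ctz
    // gives its position: here bit 4, i.e. COLOR_TEMPERATURE.
    std::printf("chosen bit: %d\n", __builtin_ctz(intersection));
  }
  return 0;
}
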
@@ -425,20 +425,19 @@ ColorMode LightCall::compute_color_mode_() { // If no color mode is specified, we try to guess the color mode. This is needed for backward compatibility to // pre-colormode clients and automations, but also for the MQTT API, where HA doesn't let us know which color mode // was used for some reason. - std::set suitable_modes = this->get_suitable_color_modes_(); + // Compute intersection of suitable and supported modes using bitwise AND + color_mode_bitmask_t intersection = this->get_suitable_color_modes_mask_() & supported_modes.get_mask(); - // Don't change if the current mode is suitable. - if (suitable_modes.count(current_mode) > 0) { + // Don't change if the current mode is in the intersection (suitable AND supported) + if (ColorModeMask::mask_contains(intersection, current_mode)) { ESP_LOGI(TAG, "'%s': color mode not specified; retaining %s", this->parent_->get_name().c_str(), LOG_STR_ARG(color_mode_to_human(current_mode))); return current_mode; } // Use the preferred suitable mode. - for (auto mode : suitable_modes) { - if (supported_modes.count(mode) == 0) - continue; - + if (intersection != 0) { + ColorMode mode = ColorModeMask::first_value_from_mask(intersection); ESP_LOGI(TAG, "'%s': color mode not specified; using %s", this->parent_->get_name().c_str(), LOG_STR_ARG(color_mode_to_human(mode))); return mode; @@ -451,7 +450,7 @@ ColorMode LightCall::compute_color_mode_() { LOG_STR_ARG(color_mode_to_human(color_mode))); return color_mode; } -std::set LightCall::get_suitable_color_modes_() { +color_mode_bitmask_t LightCall::get_suitable_color_modes_mask_() { bool has_white = this->has_white() && this->white_ > 0.0f; bool has_ct = this->has_color_temperature(); bool has_cwww = @@ -459,36 +458,44 @@ std::set LightCall::get_suitable_color_modes_() { bool has_rgb = (this->has_color_brightness() && this->color_brightness_ > 0.0f) || (this->has_red() || this->has_green() || this->has_blue()); -// Build key from flags: [rgb][cwww][ct][white] + // Build key from flags: [rgb][cwww][ct][white] #define KEY(white, ct, cwww, rgb) ((white) << 0 | (ct) << 1 | (cwww) << 2 | (rgb) << 3) uint8_t key = KEY(has_white, has_ct, has_cwww, has_rgb); switch (key) { case KEY(true, false, false, false): // white only - return {ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, - ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::WHITE, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, + ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); case KEY(false, true, false, false): // ct only - return {ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, - ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::COLOR_TEMPERATURE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE, + ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); case KEY(true, true, false, false): // white + ct - return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask( + {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); case KEY(false, false, true, false): // cwww only - return {ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::COLD_WARM_WHITE, ColorMode::RGB_COLD_WARM_WHITE}).get_mask(); case KEY(false, false, false, false): // none - return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, 
ColorMode::RGB_COLD_WARM_WHITE, ColorMode::RGB, - ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE, + ColorMode::RGB, ColorMode::WHITE, ColorMode::COLOR_TEMPERATURE, ColorMode::COLD_WARM_WHITE}) + .get_mask(); case KEY(true, false, false, true): // rgb + white - return {ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); case KEY(false, true, false, true): // rgb + ct case KEY(true, true, false, true): // rgb + white + ct - return {ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}).get_mask(); case KEY(false, false, true, true): // rgb + cwww - return {ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB_COLD_WARM_WHITE}).get_mask(); case KEY(false, false, false, true): // rgb only - return {ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, ColorMode::RGB_COLD_WARM_WHITE}; + return ColorModeMask({ColorMode::RGB, ColorMode::RGB_WHITE, ColorMode::RGB_COLOR_TEMPERATURE, + ColorMode::RGB_COLD_WARM_WHITE}) + .get_mask(); default: - return {}; // conflicting flags + return 0; // conflicting flags } #undef KEY @@ -504,7 +511,7 @@ LightCall &LightCall::set_effect(const std::string &effect) { for (uint32_t i = 0; i < this->parent_->effects_.size(); i++) { LightEffect *e = this->parent_->effects_[i]; - if (strcasecmp(effect.c_str(), e->get_name().c_str()) == 0) { + if (strcasecmp(effect.c_str(), e->get_name()) == 0) { this->set_effect(i + 1); found = true; break; diff --git a/esphome/components/light/light_call.h b/esphome/components/light/light_call.h index d3a526b136..6931b58b9d 100644 --- a/esphome/components/light/light_call.h +++ b/esphome/components/light/light_call.h @@ -1,7 +1,6 @@ #pragma once #include "light_color_values.h" -#include namespace esphome { @@ -186,8 +185,8 @@ class LightCall { //// Compute the color mode that should be used for this call. ColorMode compute_color_mode_(); - /// Get potential color modes for this light call. - std::set get_suitable_color_modes_(); + /// Get potential color modes bitmask for this light call. + color_mode_bitmask_t get_suitable_color_modes_mask_(); /// Some color modes also can be set using non-native parameters, transform those calls. void transform_parameters_(); diff --git a/esphome/components/light/light_effect.h b/esphome/components/light/light_effect.h index dbaf1faf24..7b734c2001 100644 --- a/esphome/components/light/light_effect.h +++ b/esphome/components/light/light_effect.h @@ -1,7 +1,5 @@ #pragma once -#include - #include "esphome/core/component.h" namespace esphome { @@ -11,7 +9,7 @@ class LightState; class LightEffect { public: - explicit LightEffect(std::string name) : name_(std::move(name)) {} + explicit LightEffect(const char *name) : name_(name) {} /// Initialize this LightEffect. Will be called once after creation. virtual void start() {} @@ -24,7 +22,7 @@ class LightEffect { /// Apply this effect. Use the provided state for starting transitions, ... virtual void apply() = 0; - const std::string &get_name() { return this->name_; } + const char *get_name() const { return this->name_; } /// Internal method called by the LightState when this light effect is registered in it. 
virtual void init() {} @@ -47,7 +45,7 @@ class LightEffect { protected: LightState *state_{nullptr}; - std::string name_; + const char *name_; /// Internal method to find this effect's index in the parent light's effect list. uint32_t get_index_in_parent_() const; diff --git a/esphome/components/light/light_json_schema.cpp b/esphome/components/light/light_json_schema.cpp index 010e130612..e754c453b5 100644 --- a/esphome/components/light/light_json_schema.cpp +++ b/esphome/components/light/light_json_schema.cpp @@ -43,7 +43,6 @@ void LightJSONSchema::dump_json(LightState &state, JsonObject root) { } auto values = state.remote_values; - auto traits = state.get_output()->get_traits(); const auto color_mode = values.get_color_mode(); const char *mode_str = get_color_mode_json_str(color_mode); diff --git a/esphome/components/light/light_state.cpp b/esphome/components/light/light_state.cpp index 1d139e49e7..7b0a698bb8 100644 --- a/esphome/components/light/light_state.cpp +++ b/esphome/components/light/light_state.cpp @@ -178,12 +178,9 @@ void LightState::set_restore_mode(LightRestoreMode restore_mode) { this->restore void LightState::set_initial_state(const LightStateRTCState &initial_state) { this->initial_state_ = initial_state; } bool LightState::supports_effects() { return !this->effects_.empty(); } const FixedVector &LightState::get_effects() const { return this->effects_; } -void LightState::add_effects(const std::vector &effects) { +void LightState::add_effects(const std::initializer_list &effects) { // Called once from Python codegen during setup with all effects from YAML config - this->effects_.init(effects.size()); - for (auto *effect : effects) { - this->effects_.push_back(effect); - } + this->effects_ = effects; } void LightState::current_values_as_binary(bool *binary) { this->current_values.as_binary(binary); } @@ -191,11 +188,9 @@ void LightState::current_values_as_brightness(float *brightness) { this->current_values.as_brightness(brightness, this->gamma_correct_); } void LightState::current_values_as_rgb(float *red, float *green, float *blue, bool color_interlock) { - auto traits = this->get_traits(); this->current_values.as_rgb(red, green, blue, this->gamma_correct_, false); } void LightState::current_values_as_rgbw(float *red, float *green, float *blue, float *white, bool color_interlock) { - auto traits = this->get_traits(); this->current_values.as_rgbw(red, green, blue, white, this->gamma_correct_, false); } void LightState::current_values_as_rgbww(float *red, float *green, float *blue, float *cold_white, float *warm_white, @@ -209,7 +204,6 @@ void LightState::current_values_as_rgbct(float *red, float *green, float *blue, white_brightness, this->gamma_correct_); } void LightState::current_values_as_cwww(float *cold_white, float *warm_white, bool constant_brightness) { - auto traits = this->get_traits(); this->current_values.as_cwww(cold_white, warm_white, this->gamma_correct_, constant_brightness); } void LightState::current_values_as_ct(float *color_temperature, float *white_brightness) { diff --git a/esphome/components/light/light_state.h b/esphome/components/light/light_state.h index a07aeb6ae5..bf63c0ec27 100644 --- a/esphome/components/light/light_state.h +++ b/esphome/components/light/light_state.h @@ -163,7 +163,7 @@ class LightState : public EntityBase, public Component { const FixedVector &get_effects() const; /// Add effects for this light state. 
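[Editor's note] With the LightEffect change above, name_ is a const char * rather than a std::string, so effect names are expected to be string literals that outlive the effect (codegen passes literals), and lookups stay case-insensitive via strcasecmp(), as in LightCall::set_effect() earlier and LightState just below. A minimal sketch of that contract; DemoEffect is a made-up stand-in, not an esphome class.

// Illustrative sketch only -- DemoEffect stands in for LightEffect to show the contract:
// the name is a pointer to a string literal (no heap copy) and lookups compare
// case-insensitively with strcasecmp().
#include <strings.h>  // POSIX strcasecmp

class DemoEffect {
 public:
  explicit DemoEffect(const char *name) : name_(name) {}
  const char *get_name() const { return this->name_; }

 protected:
  const char *name_;  // must outlive the effect; a string literal satisfies this trivially
};

int main() {
  DemoEffect effect{"Rainbow"};
  // A lookup by user-supplied name matches regardless of case.
  return strcasecmp("rainbow", effect.get_name()) == 0 ? 0 : 1;
}
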
- void add_effects(const std::vector &effects); + void add_effects(const std::initializer_list &effects); /// Get the total number of effects available for this light. size_t get_effect_count() const { return this->effects_.size(); } @@ -177,7 +177,7 @@ class LightState : public EntityBase, public Component { return 0; } for (size_t i = 0; i < this->effects_.size(); i++) { - if (strcasecmp(effect_name.c_str(), this->effects_[i]->get_name().c_str()) == 0) { + if (strcasecmp(effect_name.c_str(), this->effects_[i]->get_name()) == 0) { return i + 1; // Effects are 1-indexed in active_effect_index_ } } diff --git a/esphome/components/light/light_traits.h b/esphome/components/light/light_traits.h index a45301d148..294b0cad1d 100644 --- a/esphome/components/light/light_traits.h +++ b/esphome/components/light/light_traits.h @@ -1,8 +1,7 @@ #pragma once -#include "esphome/core/helpers.h" #include "color_mode.h" -#include +#include "esphome/core/helpers.h" namespace esphome { @@ -19,38 +18,17 @@ class LightTraits { public: LightTraits() = default; - const std::set &get_supported_color_modes() const { return this->supported_color_modes_; } - void set_supported_color_modes(std::set supported_color_modes) { - this->supported_color_modes_ = std::move(supported_color_modes); + const ColorModeMask &get_supported_color_modes() const { return this->supported_color_modes_; } + void set_supported_color_modes(ColorModeMask supported_color_modes) { + this->supported_color_modes_ = supported_color_modes; + } + void set_supported_color_modes(std::initializer_list modes) { + this->supported_color_modes_ = ColorModeMask(modes); } - bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.count(color_mode); } + bool supports_color_mode(ColorMode color_mode) const { return this->supported_color_modes_.count(color_mode) > 0; } bool supports_color_capability(ColorCapability color_capability) const { - for (auto mode : this->supported_color_modes_) { - if (mode & color_capability) - return true; - } - return false; - } - - ESPDEPRECATED("get_supports_brightness() is deprecated, use color modes instead.", "v1.21") - bool get_supports_brightness() const { return this->supports_color_capability(ColorCapability::BRIGHTNESS); } - ESPDEPRECATED("get_supports_rgb() is deprecated, use color modes instead.", "v1.21") - bool get_supports_rgb() const { return this->supports_color_capability(ColorCapability::RGB); } - ESPDEPRECATED("get_supports_rgb_white_value() is deprecated, use color modes instead.", "v1.21") - bool get_supports_rgb_white_value() const { - return this->supports_color_mode(ColorMode::RGB_WHITE) || - this->supports_color_mode(ColorMode::RGB_COLOR_TEMPERATURE); - } - ESPDEPRECATED("get_supports_color_temperature() is deprecated, use color modes instead.", "v1.21") - bool get_supports_color_temperature() const { - return this->supports_color_capability(ColorCapability::COLOR_TEMPERATURE); - } - ESPDEPRECATED("get_supports_color_interlock() is deprecated, use color modes instead.", "v1.21") - bool get_supports_color_interlock() const { - return this->supports_color_mode(ColorMode::RGB) && - (this->supports_color_mode(ColorMode::WHITE) || this->supports_color_mode(ColorMode::COLD_WARM_WHITE) || - this->supports_color_mode(ColorMode::COLOR_TEMPERATURE)); + return has_capability(this->supported_color_modes_, color_capability); } float get_min_mireds() const { return this->min_mireds_; } @@ -59,19 +37,9 @@ class LightTraits { void set_max_mireds(float max_mireds) { this->max_mireds_ = 
max_mireds; } protected: -#ifdef USE_API - // The API connection is a friend class to access internal methods - friend class api::APIConnection; - // This method returns a reference to the internal color modes set. - // It is used by the API to avoid copying data when encoding messages. - // Warning: Do not use this method outside of the API connection code. - // It returns a reference to internal data that can be invalidated. - const std::set &get_supported_color_modes_for_api_() const { return this->supported_color_modes_; } -#endif - - std::set supported_color_modes_{}; float min_mireds_{0}; float max_mireds_{0}; + ColorModeMask supported_color_modes_{}; }; } // namespace light diff --git a/esphome/components/light/light_transformer.h b/esphome/components/light/light_transformer.h index fb9b709187..a84183c03c 100644 --- a/esphome/components/light/light_transformer.h +++ b/esphome/components/light/light_transformer.h @@ -38,6 +38,10 @@ class LightTransformer { const LightColorValues &get_target_values() const { return this->target_values_; } protected: + // This looks crazy, but it reduces to 6x^5 - 15x^4 + 10x^3 which is just a smooth sigmoid-like + // transition from 0 to 1 on x = [0, 1] + static float smoothed_progress(float x) { return x * x * x * (x * (x * 6.0f - 15.0f) + 10.0f); } + /// The progress of this transition, on a scale of 0 to 1. float get_progress_() { uint32_t now = esphome::millis(); diff --git a/esphome/components/light/transformers.h b/esphome/components/light/transformers.h index 8d49acff97..71d41a66d3 100644 --- a/esphome/components/light/transformers.h +++ b/esphome/components/light/transformers.h @@ -50,15 +50,11 @@ class LightTransitionTransformer : public LightTransformer { if (this->changing_color_mode_) p = p < 0.5f ? 
p * 2 : (p - 0.5) * 2; - float v = LightTransitionTransformer::smoothed_progress(p); + float v = LightTransformer::smoothed_progress(p); return LightColorValues::lerp(start, end, v); } protected: - // This looks crazy, but it reduces to 6x^5 - 15x^4 + 10x^3 which is just a smooth sigmoid-like - // transition from 0 to 1 on x = [0, 1] - static float smoothed_progress(float x) { return x * x * x * (x * (x * 6.0f - 15.0f) + 10.0f); } - LightColorValues end_values_{}; LightColorValues intermediate_values_{}; bool changing_color_mode_{false}; diff --git a/esphome/components/logger/logger.h b/esphome/components/logger/logger.h index 2099520049..dc8e06e0c9 100644 --- a/esphome/components/logger/logger.h +++ b/esphome/components/logger/logger.h @@ -68,6 +68,9 @@ static constexpr char LOG_LEVEL_LETTER_CHARS[] = { // Maximum header size: 35 bytes fixed + 32 bytes tag + 16 bytes thread name = 83 bytes (45 byte safety margin) static constexpr uint16_t MAX_HEADER_SIZE = 128; +// "0x" + 2 hex digits per byte + '\0' +static constexpr size_t MAX_POINTER_REPRESENTATION = 2 + sizeof(void *) * 2 + 1; + #if defined(USE_ESP32) || defined(USE_ESP8266) || defined(USE_RP2040) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR) /** Enum for logging UART selection * @@ -177,8 +180,11 @@ class Logger : public Component { inline void HOT format_log_to_buffer_with_terminator_(uint8_t level, const char *tag, int line, const char *format, va_list args, char *buffer, uint16_t *buffer_at, uint16_t buffer_size) { -#if defined(USE_ESP32) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR) +#if defined(USE_ESP32) || defined(USE_LIBRETINY) this->write_header_to_buffer_(level, tag, line, this->get_thread_name_(), buffer, buffer_at, buffer_size); +#elif defined(USE_ZEPHYR) + char buff[MAX_POINTER_REPRESENTATION]; + this->write_header_to_buffer_(level, tag, line, this->get_thread_name_(buff), buffer, buffer_at, buffer_size); #else this->write_header_to_buffer_(level, tag, line, nullptr, buffer, buffer_at, buffer_size); #endif @@ -277,7 +283,11 @@ class Logger : public Component { #endif #if defined(USE_ESP32) || defined(USE_LIBRETINY) || defined(USE_ZEPHYR) - const char *HOT get_thread_name_() { + const char *HOT get_thread_name_( +#ifdef USE_ZEPHYR + char *buff +#endif + ) { #ifdef USE_ZEPHYR k_tid_t current_task = k_current_get(); #else @@ -291,7 +301,13 @@ class Logger : public Component { #elif defined(USE_LIBRETINY) return pcTaskGetTaskName(current_task); #elif defined(USE_ZEPHYR) - return k_thread_name_get(current_task); + const char *name = k_thread_name_get(current_task); + if (name) { + // zephyr print task names only if debug component is present + return name; + } + std::snprintf(buff, MAX_POINTER_REPRESENTATION, "%p", current_task); + return buff; #endif } } diff --git a/esphome/components/mdns/__init__.py b/esphome/components/mdns/__init__.py index c6a9ee1a0c..4776bef22f 100644 --- a/esphome/components/mdns/__init__.py +++ b/esphome/components/mdns/__init__.py @@ -13,6 +13,7 @@ from esphome.const import ( ) from esphome.core import CORE, Lambda, coroutine_with_priority from esphome.coroutine import CoroPriority +from esphome.types import ConfigType CODEOWNERS = ["@esphome/core"] DEPENDENCIES = ["network"] @@ -46,6 +47,19 @@ SERVICE_SCHEMA = cv.Schema( } ) + +def _consume_mdns_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for mDNS component.""" + if config.get(CONF_DISABLED): + return config + + from esphome.components import socket + + # mDNS needs 2 sockets (IPv4 + IPv6 multicast) + 
socket.consume_sockets(2, "mdns")(config) + return config + + CONFIG_SCHEMA = cv.All( cv.Schema( { @@ -55,6 +69,7 @@ CONFIG_SCHEMA = cv.All( } ), _remove_id_if_disabled, + _consume_mdns_sockets, ) diff --git a/esphome/components/mdns/mdns_esp32.cpp b/esphome/components/mdns/mdns_esp32.cpp index f2cb2d3ef5..c02bfcbadb 100644 --- a/esphome/components/mdns/mdns_esp32.cpp +++ b/esphome/components/mdns/mdns_esp32.cpp @@ -31,18 +31,17 @@ void MDNSComponent::setup() { mdns_instance_name_set(this->hostname_.c_str()); for (const auto &service : services) { - std::vector txt_records; - for (const auto &record : service.txt_records) { - mdns_txt_item_t it{}; + auto txt_records = std::make_unique(service.txt_records.size()); + for (size_t i = 0; i < service.txt_records.size(); i++) { + const auto &record = service.txt_records[i]; // key and value are either compile-time string literals in flash or pointers to dynamic_txt_values_ // Both remain valid for the lifetime of this function, and ESP-IDF makes internal copies - it.key = MDNS_STR_ARG(record.key); - it.value = MDNS_STR_ARG(record.value); - txt_records.push_back(it); + txt_records[i].key = MDNS_STR_ARG(record.key); + txt_records[i].value = MDNS_STR_ARG(record.value); } uint16_t port = const_cast &>(service.port).value(); err = mdns_service_add(nullptr, MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto), port, - txt_records.data(), txt_records.size()); + txt_records.get(), service.txt_records.size()); if (err != ESP_OK) { ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err)); diff --git a/esphome/components/midea/air_conditioner.cpp b/esphome/components/midea/air_conditioner.cpp index 170a2f6a40..0ad26ebd51 100644 --- a/esphome/components/midea/air_conditioner.cpp +++ b/esphome/components/midea/air_conditioner.cpp @@ -77,7 +77,7 @@ void AirConditioner::control(const ClimateCall &call) { ClimateTraits AirConditioner::traits() { auto traits = ClimateTraits(); - traits.set_supports_current_temperature(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); traits.set_visual_min_temperature(17); traits.set_visual_max_temperature(30); traits.set_visual_temperature_step(0.5); diff --git a/esphome/components/mipi_rgb/models/waveshare.py b/esphome/components/mipi_rgb/models/waveshare.py index a38493e816..0fc765fd52 100644 --- a/esphome/components/mipi_rgb/models/waveshare.py +++ b/esphome/components/mipi_rgb/models/waveshare.py @@ -30,6 +30,19 @@ wave_4_3 = DriverChip( "blue": [14, 38, 18, 17, 10], }, ) + +wave_4_3.extend( + "WAVESHARE-5-1024X600", + width=1024, + height=600, + hsync_back_porch=145, + hsync_front_porch=170, + hsync_pulse_width=30, + vsync_back_porch=23, + vsync_front_porch=12, + vsync_pulse_width=2, +) + wave_4_3.extend( "ESP32-S3-TOUCH-LCD-7-800X480", enable_pin=[{"ch422g": None, "number": 2}, {"ch422g": None, "number": 6}], diff --git a/esphome/components/mitsubishi/mitsubishi.cpp b/esphome/components/mitsubishi/mitsubishi.cpp index 3d9207dd96..10ab4f3b5c 100644 --- a/esphome/components/mitsubishi/mitsubishi.cpp +++ b/esphome/components/mitsubishi/mitsubishi.cpp @@ -52,8 +52,9 @@ const uint8_t MITSUBISHI_BYTE16 = 0x00; climate::ClimateTraits MitsubishiClimate::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(this->sensor_ != nullptr); - traits.set_supports_action(false); + if (this->sensor_ != nullptr) { + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); + } 
traits.set_visual_min_temperature(MITSUBISHI_TEMP_MIN); traits.set_visual_max_temperature(MITSUBISHI_TEMP_MAX); traits.set_visual_temperature_step(1.0f); diff --git a/esphome/components/mqtt/__init__.py b/esphome/components/mqtt/__init__.py index 814fb566d4..641c70a367 100644 --- a/esphome/components/mqtt/__init__.py +++ b/esphome/components/mqtt/__init__.py @@ -58,6 +58,7 @@ from esphome.const import ( PlatformFramework, ) from esphome.core import CORE, CoroPriority, coroutine_with_priority +from esphome.types import ConfigType DEPENDENCIES = ["network"] @@ -210,6 +211,15 @@ def validate_fingerprint(value): return value +def _consume_mqtt_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for MQTT component.""" + from esphome.components import socket + + # MQTT needs 1 socket for the broker connection + socket.consume_sockets(1, "mqtt")(config) + return config + + CONFIG_SCHEMA = cv.All( cv.Schema( { @@ -306,6 +316,7 @@ CONFIG_SCHEMA = cv.All( ), validate_config, cv.only_on([PLATFORM_ESP32, PLATFORM_ESP8266, PLATFORM_BK72XX]), + _consume_mqtt_sockets, ) diff --git a/esphome/components/mqtt/mqtt_client.cpp b/esphome/components/mqtt/mqtt_client.cpp index 16f54ab8a0..9055b4421e 100644 --- a/esphome/components/mqtt/mqtt_client.cpp +++ b/esphome/components/mqtt/mqtt_client.cpp @@ -140,11 +140,8 @@ void MQTTClientComponent::send_device_info_() { #endif #ifdef USE_API_NOISE - if (api::global_api_server->get_noise_ctx()->has_psk()) { - root["api_encryption"] = "Noise_NNpsk0_25519_ChaChaPoly_SHA256"; - } else { - root["api_encryption_supported"] = "Noise_NNpsk0_25519_ChaChaPoly_SHA256"; - } + root[api::global_api_server->get_noise_ctx()->has_psk() ? "api_encryption" : "api_encryption_supported"] = + "Noise_NNpsk0_25519_ChaChaPoly_SHA256"; #endif }, 2, this->discovery_info_.retain); diff --git a/esphome/components/mqtt/mqtt_component.cpp b/esphome/components/mqtt/mqtt_component.cpp index 6ceaf219ff..eb6114008a 100644 --- a/esphome/components/mqtt/mqtt_component.cpp +++ b/esphome/components/mqtt/mqtt_component.cpp @@ -85,24 +85,20 @@ bool MQTTComponent::send_discovery_() { } // Fields from EntityBase - if (this->get_entity()->has_own_name()) { - root[MQTT_NAME] = this->friendly_name(); - } else { - root[MQTT_NAME] = ""; - } + root[MQTT_NAME] = this->get_entity()->has_own_name() ? this->friendly_name() : ""; + if (this->is_disabled_by_default()) root[MQTT_ENABLED_BY_DEFAULT] = false; if (!this->get_icon().empty()) root[MQTT_ICON] = this->get_icon(); - switch (this->get_entity()->get_entity_category()) { + const auto entity_category = this->get_entity()->get_entity_category(); + switch (entity_category) { case ENTITY_CATEGORY_NONE: break; case ENTITY_CATEGORY_CONFIG: - root[MQTT_ENTITY_CATEGORY] = "config"; - break; case ENTITY_CATEGORY_DIAGNOSTIC: - root[MQTT_ENTITY_CATEGORY] = "diagnostic"; + root[MQTT_ENTITY_CATEGORY] = entity_category == ENTITY_CATEGORY_CONFIG ? 
"config" : "diagnostic"; break; } @@ -113,20 +109,14 @@ bool MQTTComponent::send_discovery_() { if (this->command_retain_) root[MQTT_COMMAND_RETAIN] = true; - if (this->availability_ == nullptr) { - if (!global_mqtt_client->get_availability().topic.empty()) { - root[MQTT_AVAILABILITY_TOPIC] = global_mqtt_client->get_availability().topic; - if (global_mqtt_client->get_availability().payload_available != "online") - root[MQTT_PAYLOAD_AVAILABLE] = global_mqtt_client->get_availability().payload_available; - if (global_mqtt_client->get_availability().payload_not_available != "offline") - root[MQTT_PAYLOAD_NOT_AVAILABLE] = global_mqtt_client->get_availability().payload_not_available; - } - } else if (!this->availability_->topic.empty()) { - root[MQTT_AVAILABILITY_TOPIC] = this->availability_->topic; - if (this->availability_->payload_available != "online") - root[MQTT_PAYLOAD_AVAILABLE] = this->availability_->payload_available; - if (this->availability_->payload_not_available != "offline") - root[MQTT_PAYLOAD_NOT_AVAILABLE] = this->availability_->payload_not_available; + const Availability &avail = + this->availability_ == nullptr ? global_mqtt_client->get_availability() : *this->availability_; + if (!avail.topic.empty()) { + root[MQTT_AVAILABILITY_TOPIC] = avail.topic; + if (avail.payload_available != "online") + root[MQTT_PAYLOAD_AVAILABLE] = avail.payload_available; + if (avail.payload_not_available != "offline") + root[MQTT_PAYLOAD_NOT_AVAILABLE] = avail.payload_not_available; } const MQTTDiscoveryInfo &discovery_info = global_mqtt_client->get_discovery_info(); @@ -145,10 +135,8 @@ bool MQTTComponent::send_discovery_() { if (discovery_info.object_id_generator == MQTT_DEVICE_NAME_OBJECT_ID_GENERATOR) root[MQTT_OBJECT_ID] = node_name + "_" + this->get_default_object_id_(); - std::string node_friendly_name = App.get_friendly_name(); - if (node_friendly_name.empty()) { - node_friendly_name = node_name; - } + const std::string &friendly_name_ref = App.get_friendly_name(); + const std::string &node_friendly_name = friendly_name_ref.empty() ? node_name : friendly_name_ref; std::string node_area = App.get_area(); JsonObject device_info = root[MQTT_DEVICE].to(); @@ -158,13 +146,9 @@ bool MQTTComponent::send_discovery_() { #ifdef ESPHOME_PROJECT_NAME device_info[MQTT_DEVICE_SW_VERSION] = ESPHOME_PROJECT_VERSION " (ESPHome " ESPHOME_VERSION ")"; const char *model = std::strchr(ESPHOME_PROJECT_NAME, '.'); - if (model == nullptr) { // must never happen but check anyway - device_info[MQTT_DEVICE_MODEL] = ESPHOME_BOARD; - device_info[MQTT_DEVICE_MANUFACTURER] = ESPHOME_PROJECT_NAME; - } else { - device_info[MQTT_DEVICE_MODEL] = model + 1; - device_info[MQTT_DEVICE_MANUFACTURER] = std::string(ESPHOME_PROJECT_NAME, model - ESPHOME_PROJECT_NAME); - } + device_info[MQTT_DEVICE_MODEL] = model == nullptr ? ESPHOME_BOARD : model + 1; + device_info[MQTT_DEVICE_MANUFACTURER] = + model == nullptr ? 
ESPHOME_PROJECT_NAME : std::string(ESPHOME_PROJECT_NAME, model - ESPHOME_PROJECT_NAME); #else device_info[MQTT_DEVICE_SW_VERSION] = ESPHOME_VERSION " (" + App.get_compilation_time() + ")"; device_info[MQTT_DEVICE_MODEL] = ESPHOME_BOARD; diff --git a/esphome/components/mqtt/mqtt_fan.h b/esphome/components/mqtt/mqtt_fan.h index fdcec0782d..78641d224f 100644 --- a/esphome/components/mqtt/mqtt_fan.h +++ b/esphome/components/mqtt/mqtt_fan.h @@ -5,7 +5,7 @@ #ifdef USE_MQTT #ifdef USE_FAN -#include "esphome/components/fan/fan_state.h" +#include "esphome/components/fan/fan.h" #include "mqtt_component.h" namespace esphome { diff --git a/esphome/components/mqtt/mqtt_light.cpp b/esphome/components/mqtt/mqtt_light.cpp index 4f5ff408a4..883b67ffc6 100644 --- a/esphome/components/mqtt/mqtt_light.cpp +++ b/esphome/components/mqtt/mqtt_light.cpp @@ -69,6 +69,12 @@ void MQTTJSONLightComponent::send_discovery(JsonObject root, mqtt::SendDiscovery if (traits.supports_color_capability(ColorCapability::BRIGHTNESS)) root["brightness"] = true; + if (traits.supports_color_mode(ColorMode::COLOR_TEMPERATURE) || + traits.supports_color_mode(ColorMode::COLD_WARM_WHITE)) { + root[MQTT_MIN_MIREDS] = traits.get_min_mireds(); + root[MQTT_MAX_MIREDS] = traits.get_max_mireds(); + } + if (this->state_->supports_effects()) { root["effect"] = true; JsonArray effect_list = root[MQTT_EFFECT_LIST].to(); diff --git a/esphome/components/network/util.cpp b/esphome/components/network/util.cpp index 27ad9448a4..cb8f8569ad 100644 --- a/esphome/components/network/util.cpp +++ b/esphome/components/network/util.cpp @@ -99,7 +99,11 @@ const std::string &get_use_address() { return wifi::global_wifi_component->get_use_address(); #endif -#if !defined(USE_ETHERNET) && !defined(USE_MODEM) && !defined(USE_WIFI) +#ifdef USE_OPENTHREAD + return openthread::global_openthread_component->get_use_address(); +#endif + +#if !defined(USE_ETHERNET) && !defined(USE_MODEM) && !defined(USE_WIFI) && !defined(USE_OPENTHREAD) // Fallback when no network component is defined (e.g., host platform) static const std::string empty; return empty; diff --git a/esphome/components/nextion/nextion.cpp b/esphome/components/nextion/nextion.cpp index 0ce9d02e97..fc152ece1e 100644 --- a/esphome/components/nextion/nextion.cpp +++ b/esphome/components/nextion/nextion.cpp @@ -1291,9 +1291,6 @@ void Nextion::check_pending_waveform_() { void Nextion::set_writer(const nextion_writer_t &writer) { this->writer_ = writer; } -ESPDEPRECATED("set_wait_for_ack(bool) deprecated, no effect", "v1.20") -void Nextion::set_wait_for_ack(bool wait_for_ack) { ESP_LOGE(TAG, "Deprecated"); } - bool Nextion::is_updating() { return this->connection_state_.is_updating_; } } // namespace nextion diff --git a/esphome/components/openthread/__init__.py b/esphome/components/openthread/__init__.py index 3fac497c3d..01e769153a 100644 --- a/esphome/components/openthread/__init__.py +++ b/esphome/components/openthread/__init__.py @@ -4,11 +4,14 @@ from esphome.components.esp32 import ( VARIANT_ESP32H2, add_idf_sdkconfig_option, only_on_variant, + require_vfs_select, ) from esphome.components.mdns import MDNSComponent, enable_mdns_storage import esphome.config_validation as cv -from esphome.const import CONF_CHANNEL, CONF_ENABLE_IPV6, CONF_ID +from esphome.const import CONF_CHANNEL, CONF_ENABLE_IPV6, CONF_ID, CONF_USE_ADDRESS +from esphome.core import CORE import esphome.final_validate as fv +from esphome.types import ConfigType from .const import ( CONF_DEVICE_TYPE, @@ -106,6 +109,20 @@ _CONNECTION_SCHEMA = 
cv.Schema( } ) + +def _validate(config: ConfigType) -> ConfigType: + if CONF_USE_ADDRESS not in config: + config[CONF_USE_ADDRESS] = f"{CORE.name}.local" + return config + + +def _require_vfs_select(config): + """Register VFS select requirement during config validation.""" + # OpenThread uses esp_vfs_eventfd which requires VFS select support + require_vfs_select() + return config + + CONFIG_SCHEMA = cv.All( cv.Schema( { @@ -117,11 +134,14 @@ CONFIG_SCHEMA = cv.All( ), cv.Optional(CONF_FORCE_DATASET): cv.boolean, cv.Optional(CONF_TLV): cv.string_strict, + cv.Optional(CONF_USE_ADDRESS): cv.string_strict, } ).extend(_CONNECTION_SCHEMA), cv.has_exactly_one_key(CONF_NETWORK_KEY, CONF_TLV), cv.only_with_esp_idf, only_on_variant(supported=[VARIANT_ESP32C6, VARIANT_ESP32H2]), + _validate, + _require_vfs_select, ) @@ -141,10 +161,14 @@ FINAL_VALIDATE_SCHEMA = _final_validate async def to_code(config): cg.add_define("USE_OPENTHREAD") + # OpenThread uses esp_vfs_eventfd which requires VFS select support + require_vfs_select() + # OpenThread SRP needs access to mDNS services after setup enable_mdns_storage() ot = cg.new_Pvariable(config[CONF_ID]) + cg.add(ot.set_use_address(config[CONF_USE_ADDRESS])) await cg.register_component(ot, config) srp = cg.new_Pvariable(config[CONF_SRP_ID]) diff --git a/esphome/components/openthread/openthread.cpp b/esphome/components/openthread/openthread.cpp index b2c2519c08..db909e6b1f 100644 --- a/esphome/components/openthread/openthread.cpp +++ b/esphome/components/openthread/openthread.cpp @@ -252,6 +252,12 @@ void OpenThreadComponent::on_factory_reset(std::function callback) { ESP_LOGD(TAG, "Waiting on Confirmation Removal SRP Host and Services"); } +// set_use_address() is guaranteed to be called during component setup by Python code generation, +// so use_address_ will always be valid when get_use_address() is called - no fallback needed. 
+const std::string &OpenThreadComponent::get_use_address() const { return this->use_address_; } + +void OpenThreadComponent::set_use_address(const std::string &use_address) { this->use_address_ = use_address; } + } // namespace openthread } // namespace esphome diff --git a/esphome/components/openthread/openthread.h b/esphome/components/openthread/openthread.h index 5d139c633d..19dbeb4628 100644 --- a/esphome/components/openthread/openthread.h +++ b/esphome/components/openthread/openthread.h @@ -33,11 +33,15 @@ class OpenThreadComponent : public Component { void on_factory_reset(std::function callback); void defer_factory_reset_external_callback(); + const std::string &get_use_address() const; + void set_use_address(const std::string &use_address); + protected: std::optional get_omr_address_(InstanceLock &lock); bool teardown_started_{false}; bool teardown_complete_{false}; std::function factory_reset_external_callback_; + std::string use_address_; }; extern OpenThreadComponent *global_openthread_component; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables) diff --git a/esphome/components/pid/pid_climate.cpp b/esphome/components/pid/pid_climate.cpp index 8b3be36dcc..fd74eabd87 100644 --- a/esphome/components/pid/pid_climate.cpp +++ b/esphome/components/pid/pid_climate.cpp @@ -54,11 +54,10 @@ void PIDClimate::control(const climate::ClimateCall &call) { } climate::ClimateTraits PIDClimate::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(true); - traits.set_supports_two_point_target_temperature(false); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_SUPPORTS_ACTION); if (this->humidity_sensor_ != nullptr) - traits.set_supports_current_humidity(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY); traits.set_supported_modes({climate::CLIMATE_MODE_OFF}); if (supports_cool_()) @@ -68,7 +67,6 @@ climate::ClimateTraits PIDClimate::traits() { if (supports_heat_() && supports_cool_()) traits.add_supported_mode(climate::CLIMATE_MODE_HEAT_COOL); - traits.set_supports_action(true); return traits; } void PIDClimate::dump_config() { diff --git a/esphome/components/pipsolar/binary_sensor/__init__.py b/esphome/components/pipsolar/binary_sensor/__init__.py index 625c232ed5..5bcf1f75ee 100644 --- a/esphome/components/pipsolar/binary_sensor/__init__.py +++ b/esphome/components/pipsolar/binary_sensor/__init__.py @@ -62,7 +62,7 @@ CONF_WARNING_MPPT_OVERLOAD = "warning_mppt_overload" CONF_WARNING_BATTERY_TOO_LOW_TO_CHARGE = "warning_battery_too_low_to_charge" CONF_FAULT_DC_DC_OVER_CURRENT = "fault_dc_dc_over_current" CONF_FAULT_CODE = "fault_code" -CONF_WARNUNG_LOW_PV_ENERGY = "warnung_low_pv_energy" +CONF_WARNING_LOW_PV_ENERGY = "warning_low_pv_energy" CONF_WARNING_HIGH_AC_INPUT_DURING_BUS_SOFT_START = ( "warning_high_ac_input_during_bus_soft_start" ) @@ -122,7 +122,7 @@ TYPES = [ CONF_WARNING_BATTERY_TOO_LOW_TO_CHARGE, CONF_FAULT_DC_DC_OVER_CURRENT, CONF_FAULT_CODE, - CONF_WARNUNG_LOW_PV_ENERGY, + CONF_WARNING_LOW_PV_ENERGY, CONF_WARNING_HIGH_AC_INPUT_DURING_BUS_SOFT_START, CONF_WARNING_BATTERY_EQUALIZATION, ] diff --git a/esphome/components/pipsolar/output/pipsolar_output.cpp b/esphome/components/pipsolar/output/pipsolar_output.cpp index 00ec73b56a..163fbf4eb2 100644 --- a/esphome/components/pipsolar/output/pipsolar_output.cpp +++ b/esphome/components/pipsolar/output/pipsolar_output.cpp @@ -13,7 +13,7 @@ void PipsolarOutput::write_state(float state) { if 
(std::find(this->possible_values_.begin(), this->possible_values_.end(), state) != this->possible_values_.end()) { ESP_LOGD(TAG, "Will write: %s out of value %f / %02.0f", tmp, state, state); - this->parent_->switch_command(std::string(tmp)); + this->parent_->queue_command(std::string(tmp)); } else { ESP_LOGD(TAG, "Will not write: %s as it is not in list of allowed values", tmp); } diff --git a/esphome/components/pipsolar/pipsolar.cpp b/esphome/components/pipsolar/pipsolar.cpp index 5751ad59f5..b92cc3be9f 100644 --- a/esphome/components/pipsolar/pipsolar.cpp +++ b/esphome/components/pipsolar/pipsolar.cpp @@ -65,631 +65,42 @@ void Pipsolar::loop() { } } - if (this->state_ == STATE_POLL_DECODED) { - std::string mode; - switch (this->used_polling_commands_[this->last_polling_command_].identifier) { - case POLLING_QPIRI: - if (this->grid_rating_voltage_) { - this->grid_rating_voltage_->publish_state(value_grid_rating_voltage_); - } - if (this->grid_rating_current_) { - this->grid_rating_current_->publish_state(value_grid_rating_current_); - } - if (this->ac_output_rating_voltage_) { - this->ac_output_rating_voltage_->publish_state(value_ac_output_rating_voltage_); - } - if (this->ac_output_rating_frequency_) { - this->ac_output_rating_frequency_->publish_state(value_ac_output_rating_frequency_); - } - if (this->ac_output_rating_current_) { - this->ac_output_rating_current_->publish_state(value_ac_output_rating_current_); - } - if (this->ac_output_rating_apparent_power_) { - this->ac_output_rating_apparent_power_->publish_state(value_ac_output_rating_apparent_power_); - } - if (this->ac_output_rating_active_power_) { - this->ac_output_rating_active_power_->publish_state(value_ac_output_rating_active_power_); - } - if (this->battery_rating_voltage_) { - this->battery_rating_voltage_->publish_state(value_battery_rating_voltage_); - } - if (this->battery_recharge_voltage_) { - this->battery_recharge_voltage_->publish_state(value_battery_recharge_voltage_); - } - if (this->battery_under_voltage_) { - this->battery_under_voltage_->publish_state(value_battery_under_voltage_); - } - if (this->battery_bulk_voltage_) { - this->battery_bulk_voltage_->publish_state(value_battery_bulk_voltage_); - } - if (this->battery_float_voltage_) { - this->battery_float_voltage_->publish_state(value_battery_float_voltage_); - } - if (this->battery_type_) { - this->battery_type_->publish_state(value_battery_type_); - } - if (this->current_max_ac_charging_current_) { - this->current_max_ac_charging_current_->publish_state(value_current_max_ac_charging_current_); - } - if (this->current_max_charging_current_) { - this->current_max_charging_current_->publish_state(value_current_max_charging_current_); - } - if (this->input_voltage_range_) { - this->input_voltage_range_->publish_state(value_input_voltage_range_); - } - // special for input voltage range switch - if (this->input_voltage_range_switch_) { - this->input_voltage_range_switch_->publish_state(value_input_voltage_range_ == 1); - } - if (this->output_source_priority_) { - this->output_source_priority_->publish_state(value_output_source_priority_); - } - // special for output source priority switches - if (this->output_source_priority_utility_switch_) { - this->output_source_priority_utility_switch_->publish_state(value_output_source_priority_ == 0); - } - if (this->output_source_priority_solar_switch_) { - this->output_source_priority_solar_switch_->publish_state(value_output_source_priority_ == 1); - } - if (this->output_source_priority_battery_switch_) { - 
this->output_source_priority_battery_switch_->publish_state(value_output_source_priority_ == 2); - } - if (this->output_source_priority_hybrid_switch_) { - this->output_source_priority_hybrid_switch_->publish_state(value_output_source_priority_ == 3); - } - if (this->charger_source_priority_) { - this->charger_source_priority_->publish_state(value_charger_source_priority_); - } - if (this->parallel_max_num_) { - this->parallel_max_num_->publish_state(value_parallel_max_num_); - } - if (this->machine_type_) { - this->machine_type_->publish_state(value_machine_type_); - } - if (this->topology_) { - this->topology_->publish_state(value_topology_); - } - if (this->output_mode_) { - this->output_mode_->publish_state(value_output_mode_); - } - if (this->battery_redischarge_voltage_) { - this->battery_redischarge_voltage_->publish_state(value_battery_redischarge_voltage_); - } - if (this->pv_ok_condition_for_parallel_) { - this->pv_ok_condition_for_parallel_->publish_state(value_pv_ok_condition_for_parallel_); - } - // special for pv ok condition switch - if (this->pv_ok_condition_for_parallel_switch_) { - this->pv_ok_condition_for_parallel_switch_->publish_state(value_pv_ok_condition_for_parallel_ == 1); - } - if (this->pv_power_balance_) { - this->pv_power_balance_->publish_state(value_pv_power_balance_ == 1); - } - // special for power balance switch - if (this->pv_power_balance_switch_) { - this->pv_power_balance_switch_->publish_state(value_pv_power_balance_ == 1); - } - this->state_ = STATE_IDLE; - break; - case POLLING_QPIGS: - if (this->grid_voltage_) { - this->grid_voltage_->publish_state(value_grid_voltage_); - } - if (this->grid_frequency_) { - this->grid_frequency_->publish_state(value_grid_frequency_); - } - if (this->ac_output_voltage_) { - this->ac_output_voltage_->publish_state(value_ac_output_voltage_); - } - if (this->ac_output_frequency_) { - this->ac_output_frequency_->publish_state(value_ac_output_frequency_); - } - if (this->ac_output_apparent_power_) { - this->ac_output_apparent_power_->publish_state(value_ac_output_apparent_power_); - } - if (this->ac_output_active_power_) { - this->ac_output_active_power_->publish_state(value_ac_output_active_power_); - } - if (this->output_load_percent_) { - this->output_load_percent_->publish_state(value_output_load_percent_); - } - if (this->bus_voltage_) { - this->bus_voltage_->publish_state(value_bus_voltage_); - } - if (this->battery_voltage_) { - this->battery_voltage_->publish_state(value_battery_voltage_); - } - if (this->battery_charging_current_) { - this->battery_charging_current_->publish_state(value_battery_charging_current_); - } - if (this->battery_capacity_percent_) { - this->battery_capacity_percent_->publish_state(value_battery_capacity_percent_); - } - if (this->inverter_heat_sink_temperature_) { - this->inverter_heat_sink_temperature_->publish_state(value_inverter_heat_sink_temperature_); - } - if (this->pv_input_current_for_battery_) { - this->pv_input_current_for_battery_->publish_state(value_pv_input_current_for_battery_); - } - if (this->pv_input_voltage_) { - this->pv_input_voltage_->publish_state(value_pv_input_voltage_); - } - if (this->battery_voltage_scc_) { - this->battery_voltage_scc_->publish_state(value_battery_voltage_scc_); - } - if (this->battery_discharge_current_) { - this->battery_discharge_current_->publish_state(value_battery_discharge_current_); - } - if (this->add_sbu_priority_version_) { - this->add_sbu_priority_version_->publish_state(value_add_sbu_priority_version_); - } - if 
(this->configuration_status_) { - this->configuration_status_->publish_state(value_configuration_status_); - } - if (this->scc_firmware_version_) { - this->scc_firmware_version_->publish_state(value_scc_firmware_version_); - } - if (this->load_status_) { - this->load_status_->publish_state(value_load_status_); - } - if (this->battery_voltage_to_steady_while_charging_) { - this->battery_voltage_to_steady_while_charging_->publish_state( - value_battery_voltage_to_steady_while_charging_); - } - if (this->charging_status_) { - this->charging_status_->publish_state(value_charging_status_); - } - if (this->scc_charging_status_) { - this->scc_charging_status_->publish_state(value_scc_charging_status_); - } - if (this->ac_charging_status_) { - this->ac_charging_status_->publish_state(value_ac_charging_status_); - } - if (this->battery_voltage_offset_for_fans_on_) { - this->battery_voltage_offset_for_fans_on_->publish_state(value_battery_voltage_offset_for_fans_on_ / 10.0f); - } //.1 scale - if (this->eeprom_version_) { - this->eeprom_version_->publish_state(value_eeprom_version_); - } - if (this->pv_charging_power_) { - this->pv_charging_power_->publish_state(value_pv_charging_power_); - } - if (this->charging_to_floating_mode_) { - this->charging_to_floating_mode_->publish_state(value_charging_to_floating_mode_); - } - if (this->switch_on_) { - this->switch_on_->publish_state(value_switch_on_); - } - if (this->dustproof_installed_) { - this->dustproof_installed_->publish_state(value_dustproof_installed_); - } - this->state_ = STATE_IDLE; - break; - case POLLING_QMOD: - if (this->device_mode_) { - mode = value_device_mode_; - this->device_mode_->publish_state(mode); - } - this->state_ = STATE_IDLE; - break; - case POLLING_QFLAG: - if (this->silence_buzzer_open_buzzer_) { - this->silence_buzzer_open_buzzer_->publish_state(value_silence_buzzer_open_buzzer_); - } - if (this->overload_bypass_function_) { - this->overload_bypass_function_->publish_state(value_overload_bypass_function_); - } - if (this->lcd_escape_to_default_) { - this->lcd_escape_to_default_->publish_state(value_lcd_escape_to_default_); - } - if (this->overload_restart_function_) { - this->overload_restart_function_->publish_state(value_overload_restart_function_); - } - if (this->over_temperature_restart_function_) { - this->over_temperature_restart_function_->publish_state(value_over_temperature_restart_function_); - } - if (this->backlight_on_) { - this->backlight_on_->publish_state(value_backlight_on_); - } - if (this->alarm_on_when_primary_source_interrupt_) { - this->alarm_on_when_primary_source_interrupt_->publish_state(value_alarm_on_when_primary_source_interrupt_); - } - if (this->fault_code_record_) { - this->fault_code_record_->publish_state(value_fault_code_record_); - } - if (this->power_saving_) { - this->power_saving_->publish_state(value_power_saving_); - } - this->state_ = STATE_IDLE; - break; - case POLLING_QPIWS: - if (this->warnings_present_) { - this->warnings_present_->publish_state(value_warnings_present_); - } - if (this->faults_present_) { - this->faults_present_->publish_state(value_faults_present_); - } - if (this->warning_power_loss_) { - this->warning_power_loss_->publish_state(value_warning_power_loss_); - } - if (this->fault_inverter_fault_) { - this->fault_inverter_fault_->publish_state(value_fault_inverter_fault_); - } - if (this->fault_bus_over_) { - this->fault_bus_over_->publish_state(value_fault_bus_over_); - } - if (this->fault_bus_under_) { - 
this->fault_bus_under_->publish_state(value_fault_bus_under_); - } - if (this->fault_bus_soft_fail_) { - this->fault_bus_soft_fail_->publish_state(value_fault_bus_soft_fail_); - } - if (this->warning_line_fail_) { - this->warning_line_fail_->publish_state(value_warning_line_fail_); - } - if (this->fault_opvshort_) { - this->fault_opvshort_->publish_state(value_fault_opvshort_); - } - if (this->fault_inverter_voltage_too_low_) { - this->fault_inverter_voltage_too_low_->publish_state(value_fault_inverter_voltage_too_low_); - } - if (this->fault_inverter_voltage_too_high_) { - this->fault_inverter_voltage_too_high_->publish_state(value_fault_inverter_voltage_too_high_); - } - if (this->warning_over_temperature_) { - this->warning_over_temperature_->publish_state(value_warning_over_temperature_); - } - if (this->warning_fan_lock_) { - this->warning_fan_lock_->publish_state(value_warning_fan_lock_); - } - if (this->warning_battery_voltage_high_) { - this->warning_battery_voltage_high_->publish_state(value_warning_battery_voltage_high_); - } - if (this->warning_battery_low_alarm_) { - this->warning_battery_low_alarm_->publish_state(value_warning_battery_low_alarm_); - } - if (this->warning_battery_under_shutdown_) { - this->warning_battery_under_shutdown_->publish_state(value_warning_battery_under_shutdown_); - } - if (this->warning_battery_derating_) { - this->warning_battery_derating_->publish_state(value_warning_battery_derating_); - } - if (this->warning_over_load_) { - this->warning_over_load_->publish_state(value_warning_over_load_); - } - if (this->warning_eeprom_failed_) { - this->warning_eeprom_failed_->publish_state(value_warning_eeprom_failed_); - } - if (this->fault_inverter_over_current_) { - this->fault_inverter_over_current_->publish_state(value_fault_inverter_over_current_); - } - if (this->fault_inverter_soft_failed_) { - this->fault_inverter_soft_failed_->publish_state(value_fault_inverter_soft_failed_); - } - if (this->fault_self_test_failed_) { - this->fault_self_test_failed_->publish_state(value_fault_self_test_failed_); - } - if (this->fault_op_dc_voltage_over_) { - this->fault_op_dc_voltage_over_->publish_state(value_fault_op_dc_voltage_over_); - } - if (this->fault_battery_open_) { - this->fault_battery_open_->publish_state(value_fault_battery_open_); - } - if (this->fault_current_sensor_failed_) { - this->fault_current_sensor_failed_->publish_state(value_fault_current_sensor_failed_); - } - if (this->fault_battery_short_) { - this->fault_battery_short_->publish_state(value_fault_battery_short_); - } - if (this->warning_power_limit_) { - this->warning_power_limit_->publish_state(value_warning_power_limit_); - } - if (this->warning_pv_voltage_high_) { - this->warning_pv_voltage_high_->publish_state(value_warning_pv_voltage_high_); - } - if (this->fault_mppt_overload_) { - this->fault_mppt_overload_->publish_state(value_fault_mppt_overload_); - } - if (this->warning_mppt_overload_) { - this->warning_mppt_overload_->publish_state(value_warning_mppt_overload_); - } - if (this->warning_battery_too_low_to_charge_) { - this->warning_battery_too_low_to_charge_->publish_state(value_warning_battery_too_low_to_charge_); - } - if (this->fault_dc_dc_over_current_) { - this->fault_dc_dc_over_current_->publish_state(value_fault_dc_dc_over_current_); - } - if (this->fault_code_) { - this->fault_code_->publish_state(value_fault_code_); - } - if (this->warnung_low_pv_energy_) { - this->warnung_low_pv_energy_->publish_state(value_warnung_low_pv_energy_); - } - if 
(this->warning_high_ac_input_during_bus_soft_start_) { - this->warning_high_ac_input_during_bus_soft_start_->publish_state( - value_warning_high_ac_input_during_bus_soft_start_); - } - if (this->warning_battery_equalization_) { - this->warning_battery_equalization_->publish_state(value_warning_battery_equalization_); - } - this->state_ = STATE_IDLE; - break; - case POLLING_QT: - case POLLING_QMN: - this->state_ = STATE_IDLE; - break; - } - } - if (this->state_ == STATE_POLL_CHECKED) { - bool enabled = true; - std::string fc; - char tmp[PIPSOLAR_READ_BUFFER_LENGTH]; - sprintf(tmp, "%s", this->read_buffer_); - switch (this->used_polling_commands_[this->last_polling_command_].identifier) { + switch (this->enabled_polling_commands_[this->last_polling_command_].identifier) { case POLLING_QPIRI: ESP_LOGD(TAG, "Decode QPIRI"); - sscanf(tmp, "(%f %f %f %f %f %d %d %f %f %f %f %f %d %d %d %d %d %d %d %d %d %d %f %d %d", // NOLINT - &value_grid_rating_voltage_, &value_grid_rating_current_, &value_ac_output_rating_voltage_, // NOLINT - &value_ac_output_rating_frequency_, &value_ac_output_rating_current_, // NOLINT - &value_ac_output_rating_apparent_power_, &value_ac_output_rating_active_power_, // NOLINT - &value_battery_rating_voltage_, &value_battery_recharge_voltage_, // NOLINT - &value_battery_under_voltage_, &value_battery_bulk_voltage_, &value_battery_float_voltage_, // NOLINT - &value_battery_type_, &value_current_max_ac_charging_current_, // NOLINT - &value_current_max_charging_current_, &value_input_voltage_range_, // NOLINT - &value_output_source_priority_, &value_charger_source_priority_, &value_parallel_max_num_, // NOLINT - &value_machine_type_, &value_topology_, &value_output_mode_, // NOLINT - &value_battery_redischarge_voltage_, &value_pv_ok_condition_for_parallel_, // NOLINT - &value_pv_power_balance_); // NOLINT - if (this->last_qpiri_) { - this->last_qpiri_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qpiri_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; case POLLING_QPIGS: ESP_LOGD(TAG, "Decode QPIGS"); - sscanf( // NOLINT - tmp, // NOLINT - "(%f %f %f %f %d %d %d %d %f %d %d %d %f %f %f %d %1d%1d%1d%1d%1d%1d%1d%1d %d %d %d %1d%1d%1d", // NOLINT - &value_grid_voltage_, &value_grid_frequency_, &value_ac_output_voltage_, // NOLINT - &value_ac_output_frequency_, // NOLINT - &value_ac_output_apparent_power_, &value_ac_output_active_power_, &value_output_load_percent_, // NOLINT - &value_bus_voltage_, &value_battery_voltage_, &value_battery_charging_current_, // NOLINT - &value_battery_capacity_percent_, &value_inverter_heat_sink_temperature_, // NOLINT - &value_pv_input_current_for_battery_, &value_pv_input_voltage_, &value_battery_voltage_scc_, // NOLINT - &value_battery_discharge_current_, &value_add_sbu_priority_version_, // NOLINT - &value_configuration_status_, &value_scc_firmware_version_, &value_load_status_, // NOLINT - &value_battery_voltage_to_steady_while_charging_, &value_charging_status_, // NOLINT - &value_scc_charging_status_, &value_ac_charging_status_, // NOLINT - &value_battery_voltage_offset_for_fans_on_, &value_eeprom_version_, &value_pv_charging_power_, // NOLINT - &value_charging_to_floating_mode_, &value_switch_on_, // NOLINT - &value_dustproof_installed_); // NOLINT - if (this->last_qpigs_) { - this->last_qpigs_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qpigs_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; case POLLING_QMOD: ESP_LOGD(TAG, "Decode QMOD"); - 
this->value_device_mode_ = char(this->read_buffer_[1]); - if (this->last_qmod_) { - this->last_qmod_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qmod_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; case POLLING_QFLAG: ESP_LOGD(TAG, "Decode QFLAG"); - // result like:"(EbkuvxzDajy" - // get through all char: ignore first "(" Enable flag on 'E', Disable on 'D') else set the corresponding value - for (size_t i = 1; i < strlen(tmp); i++) { - switch (tmp[i]) { - case 'E': - enabled = true; - break; - case 'D': - enabled = false; - break; - case 'a': - this->value_silence_buzzer_open_buzzer_ = enabled; - break; - case 'b': - this->value_overload_bypass_function_ = enabled; - break; - case 'k': - this->value_lcd_escape_to_default_ = enabled; - break; - case 'u': - this->value_overload_restart_function_ = enabled; - break; - case 'v': - this->value_over_temperature_restart_function_ = enabled; - break; - case 'x': - this->value_backlight_on_ = enabled; - break; - case 'y': - this->value_alarm_on_when_primary_source_interrupt_ = enabled; - break; - case 'z': - this->value_fault_code_record_ = enabled; - break; - case 'j': - this->value_power_saving_ = enabled; - break; - } - } - if (this->last_qflag_) { - this->last_qflag_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qflag_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; case POLLING_QPIWS: ESP_LOGD(TAG, "Decode QPIWS"); - // '(00000000000000000000000000000000' - // iterate over all available flag (as not all models have all flags, but at least in the same order) - this->value_warnings_present_ = false; - this->value_faults_present_ = false; - - for (size_t i = 1; i < strlen(tmp); i++) { - enabled = tmp[i] == '1'; - switch (i) { - case 1: - this->value_warning_power_loss_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 2: - this->value_fault_inverter_fault_ = enabled; - this->value_faults_present_ += enabled; - break; - case 3: - this->value_fault_bus_over_ = enabled; - this->value_faults_present_ += enabled; - break; - case 4: - this->value_fault_bus_under_ = enabled; - this->value_faults_present_ += enabled; - break; - case 5: - this->value_fault_bus_soft_fail_ = enabled; - this->value_faults_present_ += enabled; - break; - case 6: - this->value_warning_line_fail_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 7: - this->value_fault_opvshort_ = enabled; - this->value_faults_present_ += enabled; - break; - case 8: - this->value_fault_inverter_voltage_too_low_ = enabled; - this->value_faults_present_ += enabled; - break; - case 9: - this->value_fault_inverter_voltage_too_high_ = enabled; - this->value_faults_present_ += enabled; - break; - case 10: - this->value_warning_over_temperature_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 11: - this->value_warning_fan_lock_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 12: - this->value_warning_battery_voltage_high_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 13: - this->value_warning_battery_low_alarm_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 15: - this->value_warning_battery_under_shutdown_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 16: - this->value_warning_battery_derating_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 17: - this->value_warning_over_load_ = enabled; - this->value_warnings_present_ 
+= enabled; - break; - case 18: - this->value_warning_eeprom_failed_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 19: - this->value_fault_inverter_over_current_ = enabled; - this->value_faults_present_ += enabled; - break; - case 20: - this->value_fault_inverter_soft_failed_ = enabled; - this->value_faults_present_ += enabled; - break; - case 21: - this->value_fault_self_test_failed_ = enabled; - this->value_faults_present_ += enabled; - break; - case 22: - this->value_fault_op_dc_voltage_over_ = enabled; - this->value_faults_present_ += enabled; - break; - case 23: - this->value_fault_battery_open_ = enabled; - this->value_faults_present_ += enabled; - break; - case 24: - this->value_fault_current_sensor_failed_ = enabled; - this->value_faults_present_ += enabled; - break; - case 25: - this->value_fault_battery_short_ = enabled; - this->value_faults_present_ += enabled; - break; - case 26: - this->value_warning_power_limit_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 27: - this->value_warning_pv_voltage_high_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 28: - this->value_fault_mppt_overload_ = enabled; - this->value_faults_present_ += enabled; - break; - case 29: - this->value_warning_mppt_overload_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 30: - this->value_warning_battery_too_low_to_charge_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 31: - this->value_fault_dc_dc_over_current_ = enabled; - this->value_faults_present_ += enabled; - break; - case 32: - fc = tmp[i]; - fc += tmp[i + 1]; - this->value_fault_code_ = parse_number(fc).value_or(0); - break; - case 34: - this->value_warnung_low_pv_energy_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 35: - this->value_warning_high_ac_input_during_bus_soft_start_ = enabled; - this->value_warnings_present_ += enabled; - break; - case 36: - this->value_warning_battery_equalization_ = enabled; - this->value_warnings_present_ += enabled; - break; - } - } - if (this->last_qpiws_) { - this->last_qpiws_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qpiws_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; case POLLING_QT: ESP_LOGD(TAG, "Decode QT"); - if (this->last_qt_) { - this->last_qt_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qt_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; case POLLING_QMN: ESP_LOGD(TAG, "Decode QMN"); - if (this->last_qmn_) { - this->last_qmn_->publish_state(tmp); - } - this->state_ = STATE_POLL_DECODED; + handle_qmn_((const char *) this->read_buffer_); + this->state_ = STATE_IDLE; break; default: this->state_ = STATE_IDLE; @@ -706,7 +117,7 @@ void Pipsolar::loop() { return; } // crc ok - this->used_polling_commands_[this->last_polling_command_].needs_update = false; + this->enabled_polling_commands_[this->last_polling_command_].needs_update = false; this->state_ = STATE_POLL_CHECKED; return; } else { @@ -719,9 +130,12 @@ void Pipsolar::loop() { uint8_t byte; this->read_byte(&byte); - if (this->read_pos_ == PIPSOLAR_READ_BUFFER_LENGTH) { + // make sure data and null terminator fit in buffer + if (this->read_pos_ >= PIPSOLAR_READ_BUFFER_LENGTH - 1) { this->read_pos_ = 0; this->empty_uart_buffer_(); + ESP_LOGW(TAG, "response data too long, discarding."); + break; } this->read_buffer_[this->read_pos_] = byte; this->read_pos_++; @@ -755,7 +169,8 @@ void Pipsolar::loop() { if 
(this->state_ == STATE_POLL) { if (millis() - this->command_start_millis_ > esphome::pipsolar::Pipsolar::COMMAND_TIMEOUT) { // command timeout - ESP_LOGD(TAG, "timeout command to poll: %s", this->used_polling_commands_[this->last_polling_command_].command); + ESP_LOGD(TAG, "timeout command to poll: %s", + this->enabled_polling_commands_[this->last_polling_command_].command); this->state_ = STATE_IDLE; } else { } @@ -786,7 +201,7 @@ uint8_t Pipsolar::check_incoming_crc_() { return 0; } -// send next command used +// send next command from queue bool Pipsolar::send_next_command_() { uint16_t crc16; if (!this->command_queue_[this->command_queue_position_].empty()) { @@ -815,14 +230,13 @@ bool Pipsolar::send_next_command_() { bool Pipsolar::send_next_poll_() { uint16_t crc16; - for (uint8_t i = 0; i < POLLING_COMMANDS_MAX; i++) { this->last_polling_command_ = (this->last_polling_command_ + 1) % POLLING_COMMANDS_MAX; - if (this->used_polling_commands_[this->last_polling_command_].length == 0) { + if (this->enabled_polling_commands_[this->last_polling_command_].length == 0) { // not enabled continue; } - if (!this->used_polling_commands_[this->last_polling_command_].needs_update) { + if (!this->enabled_polling_commands_[this->last_polling_command_].needs_update) { // no update requested continue; } @@ -830,79 +244,530 @@ bool Pipsolar::send_next_poll_() { this->command_start_millis_ = millis(); this->empty_uart_buffer_(); this->read_pos_ = 0; - crc16 = this->pipsolar_crc_(this->used_polling_commands_[this->last_polling_command_].command, - this->used_polling_commands_[this->last_polling_command_].length); - this->write_array(this->used_polling_commands_[this->last_polling_command_].command, - this->used_polling_commands_[this->last_polling_command_].length); + crc16 = this->pipsolar_crc_(this->enabled_polling_commands_[this->last_polling_command_].command, + this->enabled_polling_commands_[this->last_polling_command_].length); + this->write_array(this->enabled_polling_commands_[this->last_polling_command_].command, + this->enabled_polling_commands_[this->last_polling_command_].length); // checksum this->write(((uint8_t) ((crc16) >> 8))); // highbyte this->write(((uint8_t) ((crc16) &0xff))); // lowbyte // end Byte this->write(0x0D); ESP_LOGD(TAG, "Sending polling command : %s with length %d", - this->used_polling_commands_[this->last_polling_command_].command, - this->used_polling_commands_[this->last_polling_command_].length); + this->enabled_polling_commands_[this->last_polling_command_].command, + this->enabled_polling_commands_[this->last_polling_command_].length); return true; } return false; } -void Pipsolar::queue_command_(const char *command, uint8_t length) { +void Pipsolar::queue_command(const std::string &command) { uint8_t next_position = command_queue_position_; for (uint8_t i = 0; i < COMMAND_QUEUE_LENGTH; i++) { uint8_t testposition = (next_position + i) % COMMAND_QUEUE_LENGTH; if (command_queue_[testposition].empty()) { command_queue_[testposition] = command; - ESP_LOGD(TAG, "Command queued successfully: %s with length %u at position %d", command, - command_queue_[testposition].length(), testposition); + ESP_LOGD(TAG, "Command queued successfully: %s at position %d", command.c_str(), testposition); return; } } - ESP_LOGD(TAG, "Command queue full dropping command: %s", command); + ESP_LOGD(TAG, "Command queue full dropping command: %s", command.c_str()); } -void Pipsolar::switch_command(const std::string &command) { - ESP_LOGD(TAG, "got command: %s", command.c_str()); - 
queue_command_(command.c_str(), command.length()); +void Pipsolar::handle_qpiri_(const char *message) { + if (this->last_qpiri_) { + this->last_qpiri_->publish_state(message); + } + + size_t pos = 0; + this->skip_start_(message, &pos); + + this->read_float_sensor_(message, &pos, this->grid_rating_voltage_); + this->read_float_sensor_(message, &pos, this->grid_rating_current_); + this->read_float_sensor_(message, &pos, this->ac_output_rating_voltage_); + this->read_float_sensor_(message, &pos, this->ac_output_rating_frequency_); + this->read_float_sensor_(message, &pos, this->ac_output_rating_current_); + + this->read_int_sensor_(message, &pos, this->ac_output_rating_apparent_power_); + this->read_int_sensor_(message, &pos, this->ac_output_rating_active_power_); + + this->read_float_sensor_(message, &pos, this->battery_rating_voltage_); + this->read_float_sensor_(message, &pos, this->battery_recharge_voltage_); + this->read_float_sensor_(message, &pos, this->battery_under_voltage_); + this->read_float_sensor_(message, &pos, this->battery_bulk_voltage_); + this->read_float_sensor_(message, &pos, this->battery_float_voltage_); + + this->read_int_sensor_(message, &pos, this->battery_type_); + this->read_int_sensor_(message, &pos, this->current_max_ac_charging_current_); + this->read_int_sensor_(message, &pos, this->current_max_charging_current_); + + esphome::optional input_voltage_range = parse_number(this->read_field_(message, &pos)); + esphome::optional output_source_priority = parse_number(this->read_field_(message, &pos)); + + this->read_int_sensor_(message, &pos, this->charger_source_priority_); + this->read_int_sensor_(message, &pos, this->parallel_max_num_); + this->read_int_sensor_(message, &pos, this->machine_type_); + this->read_int_sensor_(message, &pos, this->topology_); + this->read_int_sensor_(message, &pos, this->output_mode_); + + this->read_float_sensor_(message, &pos, this->battery_redischarge_voltage_); + + esphome::optional pv_ok_condition_for_parallel = parse_number(this->read_field_(message, &pos)); + esphome::optional pv_power_balance = parse_number(this->read_field_(message, &pos)); + + if (this->input_voltage_range_) { + this->input_voltage_range_->publish_state(input_voltage_range.value_or(NAN)); + } + // special for input voltage range switch + if (this->input_voltage_range_switch_ && input_voltage_range.has_value()) { + this->input_voltage_range_switch_->publish_state(input_voltage_range.value() == 1); + } + + if (this->output_source_priority_) { + this->output_source_priority_->publish_state(output_source_priority.value_or(NAN)); + } + // special for output source priority switches + if (this->output_source_priority_utility_switch_ && output_source_priority.has_value()) { + this->output_source_priority_utility_switch_->publish_state(output_source_priority.value() == 0); + } + if (this->output_source_priority_solar_switch_ && output_source_priority.has_value()) { + this->output_source_priority_solar_switch_->publish_state(output_source_priority.value() == 1); + } + if (this->output_source_priority_battery_switch_ && output_source_priority.has_value()) { + this->output_source_priority_battery_switch_->publish_state(output_source_priority.value() == 2); + } + if (this->output_source_priority_hybrid_switch_ && output_source_priority.has_value()) { + this->output_source_priority_hybrid_switch_->publish_state(output_source_priority.value() == 3); + } + + if (this->pv_ok_condition_for_parallel_) { + 
this->pv_ok_condition_for_parallel_->publish_state(pv_ok_condition_for_parallel.value_or(NAN)); + } + // special for pv ok condition switch + if (this->pv_ok_condition_for_parallel_switch_ && pv_ok_condition_for_parallel.has_value()) { + this->pv_ok_condition_for_parallel_switch_->publish_state(pv_ok_condition_for_parallel.value() == 1); + } + + if (this->pv_power_balance_) { + this->pv_power_balance_->publish_state(pv_power_balance.value_or(NAN)); + } + // special for power balance switch + if (this->pv_power_balance_switch_ && pv_power_balance.has_value()) { + this->pv_power_balance_switch_->publish_state(pv_power_balance.value() == 1); + } } + +void Pipsolar::handle_qpigs_(const char *message) { + if (this->last_qpigs_) { + this->last_qpigs_->publish_state(message); + } + + size_t pos = 0; + this->skip_start_(message, &pos); + + this->read_float_sensor_(message, &pos, this->grid_voltage_); + this->read_float_sensor_(message, &pos, this->grid_frequency_); + this->read_float_sensor_(message, &pos, this->ac_output_voltage_); + this->read_float_sensor_(message, &pos, this->ac_output_frequency_); + + this->read_int_sensor_(message, &pos, this->ac_output_apparent_power_); + this->read_int_sensor_(message, &pos, this->ac_output_active_power_); + this->read_int_sensor_(message, &pos, this->output_load_percent_); + this->read_int_sensor_(message, &pos, this->bus_voltage_); + + this->read_float_sensor_(message, &pos, this->battery_voltage_); + + this->read_int_sensor_(message, &pos, this->battery_charging_current_); + this->read_int_sensor_(message, &pos, this->battery_capacity_percent_); + this->read_int_sensor_(message, &pos, this->inverter_heat_sink_temperature_); + + this->read_float_sensor_(message, &pos, this->pv_input_current_for_battery_); + this->read_float_sensor_(message, &pos, this->pv_input_voltage_); + this->read_float_sensor_(message, &pos, this->battery_voltage_scc_); + + this->read_int_sensor_(message, &pos, this->battery_discharge_current_); + + std::string device_status_1 = this->read_field_(message, &pos); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 0), this->add_sbu_priority_version_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 1), this->configuration_status_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 2), this->scc_firmware_version_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 3), this->load_status_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 4), this->battery_voltage_to_steady_while_charging_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 5), this->charging_status_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 6), this->scc_charging_status_); + this->publish_binary_sensor_(this->get_bit_(device_status_1, 7), this->ac_charging_status_); + + esphome::optional battery_voltage_offset_for_fans_on = parse_number(this->read_field_(message, &pos)); + if (this->battery_voltage_offset_for_fans_on_) { + this->battery_voltage_offset_for_fans_on_->publish_state(battery_voltage_offset_for_fans_on.value_or(NAN) / 10.0f); + } + this->read_int_sensor_(message, &pos, this->eeprom_version_); + this->read_int_sensor_(message, &pos, this->pv_charging_power_); + + std::string device_status_2 = this->read_field_(message, &pos); + this->publish_binary_sensor_(this->get_bit_(device_status_2, 0), this->charging_to_floating_mode_); + this->publish_binary_sensor_(this->get_bit_(device_status_2, 1), this->switch_on_); + 
this->publish_binary_sensor_(this->get_bit_(device_status_2, 2), this->dustproof_installed_); +} + +void Pipsolar::handle_qmod_(const char *message) { + std::string mode; + char device_mode = char(message[1]); + if (this->last_qmod_) { + this->last_qmod_->publish_state(message); + } + if (this->device_mode_) { + mode = device_mode; + this->device_mode_->publish_state(mode); + } +} + +void Pipsolar::handle_qflag_(const char *message) { + // result like: "(EbkuvxzDajy" + // walk through all chars, skipping the leading "(": 'E' marks the flags that follow as enabled, 'D' as disabled, any other char assigns the current state to its corresponding flag + if (this->last_qflag_) { + this->last_qflag_->publish_state(message); + } + + QFLAGValues values = QFLAGValues(); + bool enabled = true; + for (size_t i = 1; i < strlen(message); i++) { + switch (message[i]) { + case 'E': + enabled = true; + break; + case 'D': + enabled = false; + break; + case 'a': + values.silence_buzzer_open_buzzer = enabled; + break; + case 'b': + values.overload_bypass_function = enabled; + break; + case 'k': + values.lcd_escape_to_default = enabled; + break; + case 'u': + values.overload_restart_function = enabled; + break; + case 'v': + values.over_temperature_restart_function = enabled; + break; + case 'x': + values.backlight_on = enabled; + break; + case 'y': + values.alarm_on_when_primary_source_interrupt = enabled; + break; + case 'z': + values.fault_code_record = enabled; + break; + case 'j': + values.power_saving = enabled; + break; + } + } + + this->publish_binary_sensor_(values.silence_buzzer_open_buzzer, this->silence_buzzer_open_buzzer_); + this->publish_binary_sensor_(values.overload_bypass_function, this->overload_bypass_function_); + this->publish_binary_sensor_(values.lcd_escape_to_default, this->lcd_escape_to_default_); + this->publish_binary_sensor_(values.overload_restart_function, this->overload_restart_function_); + this->publish_binary_sensor_(values.over_temperature_restart_function, this->over_temperature_restart_function_); + this->publish_binary_sensor_(values.backlight_on, this->backlight_on_); + this->publish_binary_sensor_(values.alarm_on_when_primary_source_interrupt, + this->alarm_on_when_primary_source_interrupt_); + this->publish_binary_sensor_(values.fault_code_record, this->fault_code_record_); + this->publish_binary_sensor_(values.power_saving, this->power_saving_); +} + +void Pipsolar::handle_qpiws_(const char *message) { + // '(00000000000000000000000000000000' + // iterate over all available flags (not every model reports every flag, but those present appear in the same order) + if (this->last_qpiws_) { + this->last_qpiws_->publish_state(message); + } + + size_t pos = 0; + this->skip_start_(message, &pos); + std::string flags = this->read_field_(message, &pos); + + esphome::optional enabled; + bool value_warnings_present = false; + bool value_faults_present = false; + + for (size_t i = 0; i < 36; i++) { + if (i == 31 || i == 32) { + // special case for fault code + continue; + } + enabled = this->get_bit_(flags, i); + switch (i) { + case 0: + this->publish_binary_sensor_(enabled, this->warning_power_loss_); + value_warnings_present |= enabled.value_or(false); + break; + case 1: + this->publish_binary_sensor_(enabled, this->fault_inverter_fault_); + value_faults_present |= enabled.value_or(false); + break; + case 2: + this->publish_binary_sensor_(enabled, this->fault_bus_over_); + value_faults_present |= enabled.value_or(false); + break; + case 3: + this->publish_binary_sensor_(enabled, this->fault_bus_under_); + value_faults_present |= enabled.value_or(false); + break;
+ case 4: + this->publish_binary_sensor_(enabled, this->fault_bus_soft_fail_); + value_faults_present |= enabled.value_or(false); + break; + case 5: + this->publish_binary_sensor_(enabled, this->warning_line_fail_); + value_warnings_present |= enabled.value_or(false); + break; + case 6: + this->publish_binary_sensor_(enabled, this->fault_opvshort_); + value_faults_present |= enabled.value_or(false); + break; + case 7: + this->publish_binary_sensor_(enabled, this->fault_inverter_voltage_too_low_); + value_faults_present |= enabled.value_or(false); + break; + case 8: + this->publish_binary_sensor_(enabled, this->fault_inverter_voltage_too_high_); + value_faults_present |= enabled.value_or(false); + break; + case 9: + this->publish_binary_sensor_(enabled, this->warning_over_temperature_); + value_warnings_present |= enabled.value_or(false); + break; + case 10: + this->publish_binary_sensor_(enabled, this->warning_fan_lock_); + value_warnings_present |= enabled.value_or(false); + break; + case 11: + this->publish_binary_sensor_(enabled, this->warning_battery_voltage_high_); + value_warnings_present |= enabled.value_or(false); + break; + case 12: + this->publish_binary_sensor_(enabled, this->warning_battery_low_alarm_); + value_warnings_present |= enabled.value_or(false); + break; + case 14: + this->publish_binary_sensor_(enabled, this->warning_battery_under_shutdown_); + value_warnings_present |= enabled.value_or(false); + break; + case 15: + this->publish_binary_sensor_(enabled, this->warning_battery_derating_); + value_warnings_present |= enabled.value_or(false); + break; + case 16: + this->publish_binary_sensor_(enabled, this->warning_over_load_); + value_warnings_present |= enabled.value_or(false); + break; + case 17: + this->publish_binary_sensor_(enabled, this->warning_eeprom_failed_); + value_warnings_present |= enabled.value_or(false); + break; + case 18: + this->publish_binary_sensor_(enabled, this->fault_inverter_over_current_); + value_faults_present |= enabled.value_or(false); + break; + case 19: + this->publish_binary_sensor_(enabled, this->fault_inverter_soft_failed_); + value_faults_present |= enabled.value_or(false); + break; + case 20: + this->publish_binary_sensor_(enabled, this->fault_self_test_failed_); + value_faults_present |= enabled.value_or(false); + break; + case 21: + this->publish_binary_sensor_(enabled, this->fault_op_dc_voltage_over_); + value_faults_present |= enabled.value_or(false); + break; + case 22: + this->publish_binary_sensor_(enabled, this->fault_battery_open_); + value_faults_present |= enabled.value_or(false); + break; + case 23: + this->publish_binary_sensor_(enabled, this->fault_current_sensor_failed_); + value_faults_present |= enabled.value_or(false); + break; + case 24: + this->publish_binary_sensor_(enabled, this->fault_battery_short_); + value_faults_present |= enabled.value_or(false); + break; + case 25: + this->publish_binary_sensor_(enabled, this->warning_power_limit_); + value_warnings_present |= enabled.value_or(false); + break; + case 26: + this->publish_binary_sensor_(enabled, this->warning_pv_voltage_high_); + value_warnings_present |= enabled.value_or(false); + break; + case 27: + this->publish_binary_sensor_(enabled, this->fault_mppt_overload_); + value_faults_present |= enabled.value_or(false); + break; + case 28: + this->publish_binary_sensor_(enabled, this->warning_mppt_overload_); + value_warnings_present |= enabled.value_or(false); + break; + case 29: + this->publish_binary_sensor_(enabled, 
this->warning_battery_too_low_to_charge_); + value_warnings_present |= enabled.value_or(false); + break; + case 30: + this->publish_binary_sensor_(enabled, this->fault_dc_dc_over_current_); + value_faults_present |= enabled.value_or(false); + break; + case 33: + this->publish_binary_sensor_(enabled, this->warning_low_pv_energy_); + value_warnings_present |= enabled.value_or(false); + break; + case 34: + this->publish_binary_sensor_(enabled, this->warning_high_ac_input_during_bus_soft_start_); + value_warnings_present |= enabled.value_or(false); + break; + case 35: + this->publish_binary_sensor_(enabled, this->warning_battery_equalization_); + value_warnings_present |= enabled.value_or(false); + break; + } + } + + this->publish_binary_sensor_(value_warnings_present, this->warnings_present_); + this->publish_binary_sensor_(value_faults_present, this->faults_present_); + + if (this->fault_code_) { + if (flags.length() < 33) { + this->fault_code_->publish_state(NAN); + } else { + std::string fc(flags, 31, 2); + this->fault_code_->publish_state(parse_number(fc).value_or(NAN)); + } + } +} + +void Pipsolar::handle_qt_(const char *message) { + if (this->last_qt_) { + this->last_qt_->publish_state(message); + } +} + +void Pipsolar::handle_qmn_(const char *message) { + if (this->last_qmn_) { + this->last_qmn_->publish_state(message); + } +} + +void Pipsolar::skip_start_(const char *message, size_t *pos) { + if (message[*pos] == '(') { + (*pos)++; + } +} +void Pipsolar::skip_field_(const char *message, size_t *pos) { + // find delimiter or end of string + while (message[*pos] != '\0' && message[*pos] != ' ') { + (*pos)++; + } + if (message[*pos] != '\0') { + // skip delimiter after this field if there is one + (*pos)++; + } +} +std::string Pipsolar::read_field_(const char *message, size_t *pos) { + size_t begin = *pos; + // find delimiter or end of string + while (message[*pos] != '\0' && message[*pos] != ' ') { + (*pos)++; + } + if (*pos == begin) { + return ""; + } + + std::string field(message, begin, *pos - begin); + + if (message[*pos] != '\0') { + // skip delimiter after this field if there is one + (*pos)++; + } + + return field; +} + +void Pipsolar::read_float_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor) { + if (sensor != nullptr) { + std::string field = this->read_field_(message, pos); + sensor->publish_state(parse_number(field).value_or(NAN)); + } else { + this->skip_field_(message, pos); + } +} +void Pipsolar::read_int_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor) { + if (sensor != nullptr) { + std::string field = this->read_field_(message, pos); + esphome::optional parsed = parse_number(field); + sensor->publish_state(parsed.has_value() ?
parsed.value() : NAN); + } else { + this->skip_field_(message, pos); + } +} + +void Pipsolar::publish_binary_sensor_(esphome::optional b, binary_sensor::BinarySensor *sensor) { + if (sensor) { + if (b.has_value()) { + sensor->publish_state(b.value()); + } else { + sensor->invalidate_state(); + } + } +} + +esphome::optional Pipsolar::get_bit_(std::string bits, uint8_t bit_pos) { + if (bit_pos >= bits.length()) { + return {}; + } + return bits[bit_pos] == '1'; +} + void Pipsolar::dump_config() { ESP_LOGCONFIG(TAG, "Pipsolar:\n" - "used commands:"); - for (auto &used_polling_command : this->used_polling_commands_) { - if (used_polling_command.length != 0) { - ESP_LOGCONFIG(TAG, "%s", used_polling_command.command); + "enabled polling commands:"); + for (auto &enabled_polling_command : this->enabled_polling_commands_) { + if (enabled_polling_command.length != 0) { + ESP_LOGCONFIG(TAG, "%s", enabled_polling_command.command); } } } void Pipsolar::update() { - for (auto &used_polling_command : this->used_polling_commands_) { - if (used_polling_command.length != 0) { - used_polling_command.needs_update = true; + for (auto &enabled_polling_command : this->enabled_polling_commands_) { + if (enabled_polling_command.length != 0) { + enabled_polling_command.needs_update = true; } } } void Pipsolar::add_polling_command_(const char *command, ENUMPollingCommand polling_command) { - for (auto &used_polling_command : this->used_polling_commands_) { - if (used_polling_command.length == strlen(command)) { + for (auto &enabled_polling_command : this->enabled_polling_commands_) { + if (enabled_polling_command.length == strlen(command)) { uint8_t len = strlen(command); - if (memcmp(used_polling_command.command, command, len) == 0) { + if (memcmp(enabled_polling_command.command, command, len) == 0) { return; } } - if (used_polling_command.length == 0) { - size_t length = strlen(command) + 1; - const char *beg = command; - const char *end = command + length; - used_polling_command.command = new uint8_t[length]; // NOLINT(cppcoreguidelines-owning-memory) - size_t i = 0; - for (; beg != end; ++beg, ++i) { - used_polling_command.command[i] = (uint8_t) (*beg); + if (enabled_polling_command.length == 0) { + size_t length = strlen(command); + + enabled_polling_command.command = new uint8_t[length + 1]; // NOLINT(cppcoreguidelines-owning-memory) + for (size_t i = 0; i < length + 1; i++) { + enabled_polling_command.command[i] = (uint8_t) command[i]; } - used_polling_command.errors = 0; - used_polling_command.identifier = polling_command; - used_polling_command.length = length - 1; - used_polling_command.needs_update = true; + enabled_polling_command.errors = 0; + enabled_polling_command.identifier = polling_command; + enabled_polling_command.length = length; + enabled_polling_command.needs_update = true; return; } } diff --git a/esphome/components/pipsolar/pipsolar.h b/esphome/components/pipsolar/pipsolar.h index 77b18badb9..40056bac9d 100644 --- a/esphome/components/pipsolar/pipsolar.h +++ b/esphome/components/pipsolar/pipsolar.h @@ -7,6 +7,7 @@ #include "esphome/components/uart/uart.h" #include "esphome/core/automation.h" #include "esphome/core/component.h" +#include "esphome/core/helpers.h" namespace esphome { namespace pipsolar { @@ -28,10 +29,17 @@ struct PollingCommand { bool needs_update; }; -#define PIPSOLAR_VALUED_ENTITY_(type, name, polling_command, value_type) \ - protected: \ - value_type value_##name##_; \ - PIPSOLAR_ENTITY_(type, name, polling_command) +struct QFLAGValues { + esphome::optional 
silence_buzzer_open_buzzer; + esphome::optional overload_bypass_function; + esphome::optional lcd_escape_to_default; + esphome::optional overload_restart_function; + esphome::optional over_temperature_restart_function; + esphome::optional backlight_on; + esphome::optional alarm_on_when_primary_source_interrupt; + esphome::optional fault_code_record; + esphome::optional power_saving; +}; #define PIPSOLAR_ENTITY_(type, name, polling_command) \ protected: \ @@ -43,126 +51,123 @@ struct PollingCommand { this->add_polling_command_(#polling_command, POLLING_##polling_command); \ } -#define PIPSOLAR_SENSOR(name, polling_command, value_type) \ - PIPSOLAR_VALUED_ENTITY_(sensor::Sensor, name, polling_command, value_type) +#define PIPSOLAR_SENSOR(name, polling_command) PIPSOLAR_ENTITY_(sensor::Sensor, name, polling_command) #define PIPSOLAR_SWITCH(name, polling_command) PIPSOLAR_ENTITY_(switch_::Switch, name, polling_command) -#define PIPSOLAR_BINARY_SENSOR(name, polling_command, value_type) \ - PIPSOLAR_VALUED_ENTITY_(binary_sensor::BinarySensor, name, polling_command, value_type) -#define PIPSOLAR_VALUED_TEXT_SENSOR(name, polling_command, value_type) \ - PIPSOLAR_VALUED_ENTITY_(text_sensor::TextSensor, name, polling_command, value_type) +#define PIPSOLAR_BINARY_SENSOR(name, polling_command) \ + PIPSOLAR_ENTITY_(binary_sensor::BinarySensor, name, polling_command) #define PIPSOLAR_TEXT_SENSOR(name, polling_command) PIPSOLAR_ENTITY_(text_sensor::TextSensor, name, polling_command) class Pipsolar : public uart::UARTDevice, public PollingComponent { // QPIGS values - PIPSOLAR_SENSOR(grid_voltage, QPIGS, float) - PIPSOLAR_SENSOR(grid_frequency, QPIGS, float) - PIPSOLAR_SENSOR(ac_output_voltage, QPIGS, float) - PIPSOLAR_SENSOR(ac_output_frequency, QPIGS, float) - PIPSOLAR_SENSOR(ac_output_apparent_power, QPIGS, int) - PIPSOLAR_SENSOR(ac_output_active_power, QPIGS, int) - PIPSOLAR_SENSOR(output_load_percent, QPIGS, int) - PIPSOLAR_SENSOR(bus_voltage, QPIGS, int) - PIPSOLAR_SENSOR(battery_voltage, QPIGS, float) - PIPSOLAR_SENSOR(battery_charging_current, QPIGS, int) - PIPSOLAR_SENSOR(battery_capacity_percent, QPIGS, int) - PIPSOLAR_SENSOR(inverter_heat_sink_temperature, QPIGS, int) - PIPSOLAR_SENSOR(pv_input_current_for_battery, QPIGS, float) - PIPSOLAR_SENSOR(pv_input_voltage, QPIGS, float) - PIPSOLAR_SENSOR(battery_voltage_scc, QPIGS, float) - PIPSOLAR_SENSOR(battery_discharge_current, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(add_sbu_priority_version, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(configuration_status, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(scc_firmware_version, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(load_status, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(battery_voltage_to_steady_while_charging, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(charging_status, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(scc_charging_status, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(ac_charging_status, QPIGS, int) - PIPSOLAR_SENSOR(battery_voltage_offset_for_fans_on, QPIGS, int) //.1 scale - PIPSOLAR_SENSOR(eeprom_version, QPIGS, int) - PIPSOLAR_SENSOR(pv_charging_power, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(charging_to_floating_mode, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(switch_on, QPIGS, int) - PIPSOLAR_BINARY_SENSOR(dustproof_installed, QPIGS, int) + PIPSOLAR_SENSOR(grid_voltage, QPIGS) + PIPSOLAR_SENSOR(grid_frequency, QPIGS) + PIPSOLAR_SENSOR(ac_output_voltage, QPIGS) + PIPSOLAR_SENSOR(ac_output_frequency, QPIGS) + PIPSOLAR_SENSOR(ac_output_apparent_power, QPIGS) + PIPSOLAR_SENSOR(ac_output_active_power, QPIGS) + PIPSOLAR_SENSOR(output_load_percent, 
QPIGS) + PIPSOLAR_SENSOR(bus_voltage, QPIGS) + PIPSOLAR_SENSOR(battery_voltage, QPIGS) + PIPSOLAR_SENSOR(battery_charging_current, QPIGS) + PIPSOLAR_SENSOR(battery_capacity_percent, QPIGS) + PIPSOLAR_SENSOR(inverter_heat_sink_temperature, QPIGS) + PIPSOLAR_SENSOR(pv_input_current_for_battery, QPIGS) + PIPSOLAR_SENSOR(pv_input_voltage, QPIGS) + PIPSOLAR_SENSOR(battery_voltage_scc, QPIGS) + PIPSOLAR_SENSOR(battery_discharge_current, QPIGS) + PIPSOLAR_BINARY_SENSOR(add_sbu_priority_version, QPIGS) + PIPSOLAR_BINARY_SENSOR(configuration_status, QPIGS) + PIPSOLAR_BINARY_SENSOR(scc_firmware_version, QPIGS) + PIPSOLAR_BINARY_SENSOR(load_status, QPIGS) + PIPSOLAR_BINARY_SENSOR(battery_voltage_to_steady_while_charging, QPIGS) + PIPSOLAR_BINARY_SENSOR(charging_status, QPIGS) + PIPSOLAR_BINARY_SENSOR(scc_charging_status, QPIGS) + PIPSOLAR_BINARY_SENSOR(ac_charging_status, QPIGS) + PIPSOLAR_SENSOR(battery_voltage_offset_for_fans_on, QPIGS) //.1 scale + PIPSOLAR_SENSOR(eeprom_version, QPIGS) + PIPSOLAR_SENSOR(pv_charging_power, QPIGS) + PIPSOLAR_BINARY_SENSOR(charging_to_floating_mode, QPIGS) + PIPSOLAR_BINARY_SENSOR(switch_on, QPIGS) + PIPSOLAR_BINARY_SENSOR(dustproof_installed, QPIGS) // QPIRI values - PIPSOLAR_SENSOR(grid_rating_voltage, QPIRI, float) - PIPSOLAR_SENSOR(grid_rating_current, QPIRI, float) - PIPSOLAR_SENSOR(ac_output_rating_voltage, QPIRI, float) - PIPSOLAR_SENSOR(ac_output_rating_frequency, QPIRI, float) - PIPSOLAR_SENSOR(ac_output_rating_current, QPIRI, float) - PIPSOLAR_SENSOR(ac_output_rating_apparent_power, QPIRI, int) - PIPSOLAR_SENSOR(ac_output_rating_active_power, QPIRI, int) - PIPSOLAR_SENSOR(battery_rating_voltage, QPIRI, float) - PIPSOLAR_SENSOR(battery_recharge_voltage, QPIRI, float) - PIPSOLAR_SENSOR(battery_under_voltage, QPIRI, float) - PIPSOLAR_SENSOR(battery_bulk_voltage, QPIRI, float) - PIPSOLAR_SENSOR(battery_float_voltage, QPIRI, float) - PIPSOLAR_SENSOR(battery_type, QPIRI, int) - PIPSOLAR_SENSOR(current_max_ac_charging_current, QPIRI, int) - PIPSOLAR_SENSOR(current_max_charging_current, QPIRI, int) - PIPSOLAR_SENSOR(input_voltage_range, QPIRI, int) - PIPSOLAR_SENSOR(output_source_priority, QPIRI, int) - PIPSOLAR_SENSOR(charger_source_priority, QPIRI, int) - PIPSOLAR_SENSOR(parallel_max_num, QPIRI, int) - PIPSOLAR_SENSOR(machine_type, QPIRI, int) - PIPSOLAR_SENSOR(topology, QPIRI, int) - PIPSOLAR_SENSOR(output_mode, QPIRI, int) - PIPSOLAR_SENSOR(battery_redischarge_voltage, QPIRI, float) - PIPSOLAR_SENSOR(pv_ok_condition_for_parallel, QPIRI, int) - PIPSOLAR_SENSOR(pv_power_balance, QPIRI, int) + PIPSOLAR_SENSOR(grid_rating_voltage, QPIRI) + PIPSOLAR_SENSOR(grid_rating_current, QPIRI) + PIPSOLAR_SENSOR(ac_output_rating_voltage, QPIRI) + PIPSOLAR_SENSOR(ac_output_rating_frequency, QPIRI) + PIPSOLAR_SENSOR(ac_output_rating_current, QPIRI) + PIPSOLAR_SENSOR(ac_output_rating_apparent_power, QPIRI) + PIPSOLAR_SENSOR(ac_output_rating_active_power, QPIRI) + PIPSOLAR_SENSOR(battery_rating_voltage, QPIRI) + PIPSOLAR_SENSOR(battery_recharge_voltage, QPIRI) + PIPSOLAR_SENSOR(battery_under_voltage, QPIRI) + PIPSOLAR_SENSOR(battery_bulk_voltage, QPIRI) + PIPSOLAR_SENSOR(battery_float_voltage, QPIRI) + PIPSOLAR_SENSOR(battery_type, QPIRI) + PIPSOLAR_SENSOR(current_max_ac_charging_current, QPIRI) + PIPSOLAR_SENSOR(current_max_charging_current, QPIRI) + PIPSOLAR_SENSOR(input_voltage_range, QPIRI) + PIPSOLAR_SENSOR(output_source_priority, QPIRI) + PIPSOLAR_SENSOR(charger_source_priority, QPIRI) + PIPSOLAR_SENSOR(parallel_max_num, QPIRI) + PIPSOLAR_SENSOR(machine_type, QPIRI) + 
PIPSOLAR_SENSOR(topology, QPIRI) + PIPSOLAR_SENSOR(output_mode, QPIRI) + PIPSOLAR_SENSOR(battery_redischarge_voltage, QPIRI) + PIPSOLAR_SENSOR(pv_ok_condition_for_parallel, QPIRI) + PIPSOLAR_SENSOR(pv_power_balance, QPIRI) // QMOD values - PIPSOLAR_VALUED_TEXT_SENSOR(device_mode, QMOD, char) + PIPSOLAR_TEXT_SENSOR(device_mode, QMOD) // QFLAG values - PIPSOLAR_BINARY_SENSOR(silence_buzzer_open_buzzer, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(overload_bypass_function, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(lcd_escape_to_default, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(overload_restart_function, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(over_temperature_restart_function, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(backlight_on, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(alarm_on_when_primary_source_interrupt, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(fault_code_record, QFLAG, int) - PIPSOLAR_BINARY_SENSOR(power_saving, QFLAG, int) + PIPSOLAR_BINARY_SENSOR(silence_buzzer_open_buzzer, QFLAG) + PIPSOLAR_BINARY_SENSOR(overload_bypass_function, QFLAG) + PIPSOLAR_BINARY_SENSOR(lcd_escape_to_default, QFLAG) + PIPSOLAR_BINARY_SENSOR(overload_restart_function, QFLAG) + PIPSOLAR_BINARY_SENSOR(over_temperature_restart_function, QFLAG) + PIPSOLAR_BINARY_SENSOR(backlight_on, QFLAG) + PIPSOLAR_BINARY_SENSOR(alarm_on_when_primary_source_interrupt, QFLAG) + PIPSOLAR_BINARY_SENSOR(fault_code_record, QFLAG) + PIPSOLAR_BINARY_SENSOR(power_saving, QFLAG) // QPIWS values - PIPSOLAR_BINARY_SENSOR(warnings_present, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(faults_present, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_power_loss, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_inverter_fault, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_bus_over, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_bus_under, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_bus_soft_fail, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_line_fail, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_opvshort, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_low, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_high, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_over_temperature, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_fan_lock, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_battery_voltage_high, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_battery_low_alarm, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_battery_under_shutdown, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_battery_derating, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_over_load, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_eeprom_failed, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_inverter_over_current, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_inverter_soft_failed, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_self_test_failed, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_op_dc_voltage_over, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_battery_open, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_current_sensor_failed, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_battery_short, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_power_limit, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_pv_voltage_high, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_mppt_overload, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_mppt_overload, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_battery_too_low_to_charge, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_dc_dc_over_current, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(fault_code, QPIWS, int) - PIPSOLAR_BINARY_SENSOR(warnung_low_pv_energy, QPIWS, bool) - 
PIPSOLAR_BINARY_SENSOR(warning_high_ac_input_during_bus_soft_start, QPIWS, bool) - PIPSOLAR_BINARY_SENSOR(warning_battery_equalization, QPIWS, bool) + PIPSOLAR_BINARY_SENSOR(warnings_present, QPIWS) + PIPSOLAR_BINARY_SENSOR(faults_present, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_power_loss, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_inverter_fault, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_bus_over, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_bus_under, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_bus_soft_fail, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_line_fail, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_opvshort, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_low, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_inverter_voltage_too_high, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_over_temperature, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_fan_lock, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_battery_voltage_high, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_battery_low_alarm, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_battery_under_shutdown, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_battery_derating, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_over_load, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_eeprom_failed, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_inverter_over_current, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_inverter_soft_failed, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_self_test_failed, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_op_dc_voltage_over, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_battery_open, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_current_sensor_failed, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_battery_short, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_power_limit, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_pv_voltage_high, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_mppt_overload, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_mppt_overload, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_battery_too_low_to_charge, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_dc_dc_over_current, QPIWS) + PIPSOLAR_BINARY_SENSOR(fault_code, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_low_pv_energy, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_high_ac_input_during_bus_soft_start, QPIWS) + PIPSOLAR_BINARY_SENSOR(warning_battery_equalization, QPIWS) PIPSOLAR_TEXT_SENSOR(last_qpigs, QPIGS) PIPSOLAR_TEXT_SENSOR(last_qpiri, QPIRI) @@ -180,14 +185,14 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent { PIPSOLAR_SWITCH(pv_ok_condition_for_parallel_switch, QPIRI) PIPSOLAR_SWITCH(pv_power_balance_switch, QPIRI) - void switch_command(const std::string &command); + void queue_command(const std::string &command); void setup() override; void loop() override; void dump_config() override; void update() override; protected: - static const size_t PIPSOLAR_READ_BUFFER_LENGTH = 110; // maximum supported answer length + static const size_t PIPSOLAR_READ_BUFFER_LENGTH = 128; // maximum supported answer length static const size_t COMMAND_QUEUE_LENGTH = 10; static const size_t COMMAND_TIMEOUT = 5000; static const size_t POLLING_COMMANDS_MAX = 15; @@ -198,7 +203,26 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent { uint16_t pipsolar_crc_(uint8_t *msg, uint8_t len); bool send_next_command_(); bool send_next_poll_(); - void queue_command_(const char *command, uint8_t length); + + void handle_qpiri_(const char *message); + void handle_qpigs_(const char *message); + void handle_qmod_(const char *message); + void handle_qflag_(const char *message); + void handle_qpiws_(const char *message); + void handle_qt_(const char *message); + void handle_qmn_(const char *message); + + void skip_start_(const 
char *message, size_t *pos); + void skip_field_(const char *message, size_t *pos); + std::string read_field_(const char *message, size_t *pos); + + void read_float_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor); + void read_int_sensor_(const char *message, size_t *pos, sensor::Sensor *sensor); + + void publish_binary_sensor_(esphome::optional b, binary_sensor::BinarySensor *sensor); + + esphome::optional get_bit_(std::string bits, uint8_t bit_pos); + std::string command_queue_[COMMAND_QUEUE_LENGTH]; uint8_t command_queue_position_ = 0; uint8_t read_buffer_[PIPSOLAR_READ_BUFFER_LENGTH]; @@ -213,11 +237,10 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent { STATE_POLL_COMPLETE = 3, STATE_COMMAND_COMPLETE = 4, STATE_POLL_CHECKED = 5, - STATE_POLL_DECODED = 6, }; uint8_t last_polling_command_ = 0; - PollingCommand used_polling_commands_[POLLING_COMMANDS_MAX]; + PollingCommand enabled_polling_commands_[POLLING_COMMANDS_MAX]; }; } // namespace pipsolar diff --git a/esphome/components/pipsolar/switch/pipsolar_switch.cpp b/esphome/components/pipsolar/switch/pipsolar_switch.cpp index be7763226b..649d951618 100644 --- a/esphome/components/pipsolar/switch/pipsolar_switch.cpp +++ b/esphome/components/pipsolar/switch/pipsolar_switch.cpp @@ -11,11 +11,11 @@ void PipsolarSwitch::dump_config() { LOG_SWITCH("", "Pipsolar Switch", this); } void PipsolarSwitch::write_state(bool state) { if (state) { if (!this->on_command_.empty()) { - this->parent_->switch_command(this->on_command_); + this->parent_->queue_command(this->on_command_); } } else { if (!this->off_command_.empty()) { - this->parent_->switch_command(this->off_command_); + this->parent_->queue_command(this->off_command_); } } } diff --git a/esphome/components/script/script.h b/esphome/components/script/script.h index 3a97a26985..58fb67a3ea 100644 --- a/esphome/components/script/script.h +++ b/esphome/components/script/script.h @@ -111,24 +111,26 @@ template class RestartScript : public Script { template class QueueingScript : public Script, public Component { public: void execute(Ts... 
x) override { - this->lazy_init_queue_(); - if (this->is_action_running() || this->num_queued_ > 0) { // num_queued_ is the number of *queued* instances (waiting, not including currently running) // max_runs_ is the maximum *total* instances (running + queued) // So we reject when num_queued_ + 1 >= max_runs_ (queued + running >= max) if (this->num_queued_ + 1 >= this->max_runs_) { - this->esp_logw_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' maximum number of queued runs exceeded!"), + this->esp_logw_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' max instances (running + queued) reached!"), LOG_STR_ARG(this->name_)); return; } + // Initialize queue on first queued item (after capacity check) + this->lazy_init_queue_(); + this->esp_logd_(__LINE__, ESPHOME_LOG_FORMAT("Script '%s' queueing new instance (mode: queued)"), LOG_STR_ARG(this->name_)); - // Ring buffer: write to (queue_front_ + num_queued_) % (max_runs_ - 1) - size_t write_pos = (this->queue_front_ + this->num_queued_) % (this->max_runs_ - 1); - // Use reset() to replace the unique_ptr - this->var_queue_[write_pos].reset(new std::tuple(std::make_tuple(x...))); + // Ring buffer: write to (queue_front_ + num_queued_) % queue_capacity + const size_t queue_capacity = static_cast(this->max_runs_ - 1); + size_t write_pos = (this->queue_front_ + this->num_queued_) % queue_capacity; + // Use std::make_unique to replace the unique_ptr + this->var_queue_[write_pos] = std::make_unique>(x...); this->num_queued_++; return; } @@ -140,9 +142,8 @@ template class QueueingScript : public Script, public Com void stop() override { // Clear all queued items to free memory immediately - for (int i = 0; i < this->max_runs_ - 1; i++) { - this->var_queue_[i].reset(); - } + // Resetting the array automatically destroys all unique_ptrs and their contents + this->var_queue_.reset(); this->num_queued_ = 0; this->queue_front_ = 0; Script::stop(); @@ -152,8 +153,9 @@ template class QueueingScript : public Script, public Com if (this->num_queued_ != 0 && !this->is_action_running()) { // Dequeue: decrement count, move tuple out (frees slot), advance read position this->num_queued_--; + const size_t queue_capacity = static_cast(this->max_runs_ - 1); auto tuple_ptr = std::move(this->var_queue_[this->queue_front_]); - this->queue_front_ = (this->queue_front_ + 1) % (this->max_runs_ - 1); + this->queue_front_ = (this->queue_front_ + 1) % queue_capacity; this->trigger_tuple_(*tuple_ptr, typename gens::type()); } } @@ -164,13 +166,10 @@ template class QueueingScript : public Script, public Com // Lazy init queue on first use - avoids setup() ordering issues and saves memory // if script is never executed during this boot cycle inline void lazy_init_queue_() { - if (this->var_queue_.capacity() == 0) { - // Allocate max_runs_ - 1 slots for queued items (running item is separate) - this->var_queue_.init(this->max_runs_ - 1); - // Initialize all unique_ptr slots to nullptr - for (int i = 0; i < this->max_runs_ - 1; i++) { - this->var_queue_.push_back(nullptr); - } + if (!this->var_queue_) { + // Allocate array of max_runs_ - 1 slots for queued items (running item is separate) + // unique_ptr array is zero-initialized, so all slots start as nullptr + this->var_queue_ = std::make_unique>[]>(this->max_runs_ - 1); } } @@ -181,7 +180,7 @@ template class QueueingScript : public Script, public Com int num_queued_ = 0; // Number of queued instances (not including currently running) int max_runs_ = 0; // Maximum total instances (running + queued) size_t queue_front_ = 0; // Ring buffer 
read position (next item to execute) - FixedVector>> var_queue_; // Ring buffer of queued parameters + std::unique_ptr>[]> var_queue_; // Ring buffer of queued parameters }; /** A script type that executes new instances in parallel. diff --git a/esphome/components/sensor/__init__.py b/esphome/components/sensor/__init__.py index d9724a741d..7e91bb83c4 100644 --- a/esphome/components/sensor/__init__.py +++ b/esphome/components/sensor/__init__.py @@ -28,6 +28,8 @@ from esphome.const import ( CONF_ON_RAW_VALUE, CONF_ON_VALUE, CONF_ON_VALUE_RANGE, + CONF_OPTIMISTIC, + CONF_PERIOD, CONF_QUANTILE, CONF_SEND_EVERY, CONF_SEND_FIRST_AT, @@ -261,9 +263,12 @@ ThrottleAverageFilter = sensor_ns.class_("ThrottleAverageFilter", Filter, cg.Com LambdaFilter = sensor_ns.class_("LambdaFilter", Filter) OffsetFilter = sensor_ns.class_("OffsetFilter", Filter) MultiplyFilter = sensor_ns.class_("MultiplyFilter", Filter) -FilterOutValueFilter = sensor_ns.class_("FilterOutValueFilter", Filter) +ValueListFilter = sensor_ns.class_("ValueListFilter", Filter) +FilterOutValueFilter = sensor_ns.class_("FilterOutValueFilter", ValueListFilter) ThrottleFilter = sensor_ns.class_("ThrottleFilter", Filter) -ThrottleWithPriorityFilter = sensor_ns.class_("ThrottleWithPriorityFilter", Filter) +ThrottleWithPriorityFilter = sensor_ns.class_( + "ThrottleWithPriorityFilter", ValueListFilter +) TimeoutFilter = sensor_ns.class_("TimeoutFilter", Filter, cg.Component) DebounceFilter = sensor_ns.class_("DebounceFilter", Filter, cg.Component) HeartbeatFilter = sensor_ns.class_("HeartbeatFilter", Filter, cg.Component) @@ -641,10 +646,29 @@ async def throttle_with_priority_filter_to_code(config, filter_id): return cg.new_Pvariable(filter_id, config[CONF_TIMEOUT], template_) +HEARTBEAT_SCHEMA = cv.Schema( + { + cv.Required(CONF_PERIOD): cv.positive_time_period_milliseconds, + cv.Optional(CONF_OPTIMISTIC, default=False): cv.boolean, + } +) + + @FILTER_REGISTRY.register( - "heartbeat", HeartbeatFilter, cv.positive_time_period_milliseconds + "heartbeat", + HeartbeatFilter, + cv.Any( + cv.positive_time_period_milliseconds, + HEARTBEAT_SCHEMA, + ), ) async def heartbeat_filter_to_code(config, filter_id): + if isinstance(config, dict): + var = cg.new_Pvariable(filter_id, config[CONF_PERIOD]) + await cg.register_component(var, {}) + cg.add(var.set_optimistic(config[CONF_OPTIMISTIC])) + return var + var = cg.new_Pvariable(filter_id, config) await cg.register_component(var, {}) return var diff --git a/esphome/components/sensor/filter.cpp b/esphome/components/sensor/filter.cpp index 1cc744e3b5..65d8dea31c 100644 --- a/esphome/components/sensor/filter.cpp +++ b/esphome/components/sensor/filter.cpp @@ -65,32 +65,41 @@ optional SlidingWindowFilter::new_value(float value) { } // SortedWindowFilter -FixedVector SortedWindowFilter::get_sorted_values_() { +FixedVector SortedWindowFilter::get_window_values_() { // Copy window without NaN values using FixedVector (no heap allocation) - FixedVector sorted_values; - sorted_values.init(this->window_count_); + // Returns unsorted values - caller will use std::nth_element for partial sorting as needed + FixedVector values; + values.init(this->window_count_); for (size_t i = 0; i < this->window_count_; i++) { float v = this->window_[i]; if (!std::isnan(v)) { - sorted_values.push_back(v); + values.push_back(v); } } - std::sort(sorted_values.begin(), sorted_values.end()); - return sorted_values; + return values; } // MedianFilter float MedianFilter::compute_result() { - FixedVector sorted_values = 
this->get_sorted_values_(); - if (sorted_values.empty()) + FixedVector values = this->get_window_values_(); + if (values.empty()) return NAN; - size_t size = sorted_values.size(); + size_t size = values.size(); + size_t mid = size / 2; + if (size % 2) { - return sorted_values[size / 2]; - } else { - return (sorted_values[size / 2] + sorted_values[(size / 2) - 1]) / 2.0f; + // Odd number of elements - use nth_element to find middle element + std::nth_element(values.begin(), values.begin() + mid, values.end()); + return values[mid]; } + // Even number of elements - need both middle elements + // Use nth_element to find upper middle element + std::nth_element(values.begin(), values.begin() + mid, values.end()); + float upper = values[mid]; + // Find the maximum of the lower half (which is now everything before mid) + float lower = *std::max_element(values.begin(), values.begin() + mid); + return (lower + upper) / 2.0f; } // SkipInitialFilter @@ -111,13 +120,16 @@ QuantileFilter::QuantileFilter(size_t window_size, size_t send_every, size_t sen : SortedWindowFilter(window_size, send_every, send_first_at), quantile_(quantile) {} float QuantileFilter::compute_result() { - FixedVector sorted_values = this->get_sorted_values_(); - if (sorted_values.empty()) + FixedVector values = this->get_window_values_(); + if (values.empty()) return NAN; - size_t position = ceilf(sorted_values.size() * this->quantile_) - 1; - ESP_LOGVV(TAG, "QuantileFilter(%p)::position: %zu/%zu", this, position + 1, sorted_values.size()); - return sorted_values[position]; + size_t position = ceilf(values.size() * this->quantile_) - 1; + ESP_LOGVV(TAG, "QuantileFilter(%p)::position: %zu/%zu", this, position + 1, values.size()); + + // Use nth_element to find the quantile element (O(n) instead of O(n log n)) + std::nth_element(values.begin(), values.begin() + position, values.end()); + return values[position]; } // MinFilter @@ -216,27 +228,40 @@ MultiplyFilter::MultiplyFilter(TemplatableValue multiplier) : multiplier_ optional MultiplyFilter::new_value(float value) { return value * this->multiplier_.value(); } -// FilterOutValueFilter -FilterOutValueFilter::FilterOutValueFilter(std::vector> values_to_filter_out) - : values_to_filter_out_(std::move(values_to_filter_out)) {} +// ValueListFilter (base class) +ValueListFilter::ValueListFilter(std::initializer_list> values) : values_(values) {} -optional FilterOutValueFilter::new_value(float value) { +bool ValueListFilter::value_matches_any_(float sensor_value) { int8_t accuracy = this->parent_->get_accuracy_decimals(); float accuracy_mult = powf(10.0f, accuracy); - for (auto filter_value : this->values_to_filter_out_) { - if (std::isnan(filter_value.value())) { - if (std::isnan(value)) { - return {}; - } + float rounded_sensor = roundf(accuracy_mult * sensor_value); + + for (auto &filter_value : this->values_) { + float fv = filter_value.value(); + + // Handle NaN comparison + if (std::isnan(fv)) { + if (std::isnan(sensor_value)) + return true; continue; } - float rounded_filter_out = roundf(accuracy_mult * filter_value.value()); - float rounded_value = roundf(accuracy_mult * value); - if (rounded_filter_out == rounded_value) { - return {}; - } + + // Compare rounded values + if (roundf(accuracy_mult * fv) == rounded_sensor) + return true; } - return value; + + return false; +} + +// FilterOutValueFilter +FilterOutValueFilter::FilterOutValueFilter(std::initializer_list> values_to_filter_out) + : ValueListFilter(values_to_filter_out) {} + +optional 
FilterOutValueFilter::new_value(float value) { + if (this->value_matches_any_(value)) + return {}; // Filter out + return value; // Pass through } // ThrottleFilter @@ -251,33 +276,15 @@ optional ThrottleFilter::new_value(float value) { } // ThrottleWithPriorityFilter -ThrottleWithPriorityFilter::ThrottleWithPriorityFilter(uint32_t min_time_between_inputs, - std::vector> prioritized_values) - : min_time_between_inputs_(min_time_between_inputs), prioritized_values_(std::move(prioritized_values)) {} +ThrottleWithPriorityFilter::ThrottleWithPriorityFilter( + uint32_t min_time_between_inputs, std::initializer_list> prioritized_values) + : ValueListFilter(prioritized_values), min_time_between_inputs_(min_time_between_inputs) {} optional ThrottleWithPriorityFilter::new_value(float value) { - bool is_prioritized_value = false; - int8_t accuracy = this->parent_->get_accuracy_decimals(); - float accuracy_mult = powf(10.0f, accuracy); const uint32_t now = App.get_loop_component_start_time(); - // First, determine if the new value is one of the prioritized values - for (auto prioritized_value : this->prioritized_values_) { - if (std::isnan(prioritized_value.value())) { - if (std::isnan(value)) { - is_prioritized_value = true; - break; - } - continue; - } - float rounded_prioritized_value = roundf(accuracy_mult * prioritized_value.value()); - float rounded_value = roundf(accuracy_mult * value); - if (rounded_prioritized_value == rounded_value) { - is_prioritized_value = true; - break; - } - } - // Finally, determine if the new value should be throttled and pass it through if not - if (this->last_input_ == 0 || now - this->last_input_ >= min_time_between_inputs_ || is_prioritized_value) { + // Allow value through if: no previous input, time expired, or is prioritized + if (this->last_input_ == 0 || now - this->last_input_ >= min_time_between_inputs_ || + this->value_matches_any_(value)) { this->last_input_ = now; return value; } @@ -306,7 +313,7 @@ optional DeltaFilter::new_value(float value) { } // OrFilter -OrFilter::OrFilter(std::vector filters) : filters_(std::move(filters)), phi_(this) {} +OrFilter::OrFilter(std::initializer_list filters) : filters_(filters), phi_(this) {} OrFilter::PhiNode::PhiNode(OrFilter *or_parent) : or_parent_(or_parent) {} optional OrFilter::PhiNode::new_value(float value) { @@ -319,14 +326,14 @@ optional OrFilter::PhiNode::new_value(float value) { } optional OrFilter::new_value(float value) { this->has_value_ = false; - for (Filter *filter : this->filters_) + for (auto *filter : this->filters_) filter->input(value); return {}; } void OrFilter::initialize(Sensor *parent, Filter *next) { Filter::initialize(parent, next); - for (Filter *filter : this->filters_) { + for (auto *filter : this->filters_) { filter->initialize(parent, &this->phi_); } this->phi_.initialize(parent, nullptr); @@ -365,8 +372,12 @@ optional HeartbeatFilter::new_value(float value) { this->last_input_ = value; this->has_value_ = true; + if (this->optimistic_) { + return value; + } return {}; } + void HeartbeatFilter::setup() { this->set_interval("heartbeat", this->time_period_, [this]() { ESP_LOGVV(TAG, "HeartbeatFilter(%p)::interval(has_value=%s, last_input=%f)", this, YESNO(this->has_value_), @@ -377,20 +388,27 @@ void HeartbeatFilter::setup() { this->output(this->last_input_); }); } + float HeartbeatFilter::get_setup_priority() const { return setup_priority::HARDWARE; } +CalibrateLinearFilter::CalibrateLinearFilter(std::initializer_list> linear_functions) + : linear_functions_(linear_functions) {} + 
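Editor's aside on the shared matching logic above (not part of the patch): ValueListFilter::value_matches_any_() rounds both the incoming reading and each configured value to the sensor's accuracy_decimals before comparing, and treats NaN as matching only NaN. A minimal standalone sketch of that comparison rule follows; the function name matches_any and the plain std::vector are illustrative stand-ins, not ESPHome API.

#include <cmath>
#include <cstdint>
#include <vector>

// Accuracy-rounded membership test, as used by filter_out and throttle_with_priority.
static bool matches_any(float sensor_value, const std::vector<float> &configured, int8_t accuracy_decimals) {
  const float mult = std::pow(10.0f, static_cast<float>(accuracy_decimals));
  const float rounded_sensor = std::round(mult * sensor_value);
  for (float candidate : configured) {
    if (std::isnan(candidate)) {  // NaN in the configured list only matches a NaN reading
      if (std::isnan(sensor_value))
        return true;
      continue;
    }
    if (std::round(mult * candidate) == rounded_sensor)
      return true;
  }
  return false;
}

With accuracy_decimals = 1, for example, a reading of 20.04 rounds into the same bucket as a configured 20.0 and is therefore treated as a match.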
optional CalibrateLinearFilter::new_value(float value) { - for (std::array f : this->linear_functions_) { + for (const auto &f : this->linear_functions_) { if (!std::isfinite(f[2]) || value < f[2]) return (value * f[0]) + f[1]; } return NAN; } +CalibratePolynomialFilter::CalibratePolynomialFilter(std::initializer_list coefficients) + : coefficients_(coefficients) {} + optional CalibratePolynomialFilter::new_value(float value) { float res = 0.0f; float x = 1.0f; - for (float coefficient : this->coefficients_) { + for (const auto &coefficient : this->coefficients_) { res += x * coefficient; x *= value; } diff --git a/esphome/components/sensor/filter.h b/esphome/components/sensor/filter.h index d99cd79f05..ecd55308d1 100644 --- a/esphome/components/sensor/filter.h +++ b/esphome/components/sensor/filter.h @@ -95,17 +95,17 @@ class MinMaxFilter : public SlidingWindowFilter { /** Base class for filters that need a sorted window (Median, Quantile). * - * Extends SlidingWindowFilter to provide a helper that creates a sorted copy - * of non-NaN values from the window. + * Extends SlidingWindowFilter to provide a helper that filters out NaN values. + * Derived classes use std::nth_element for efficient partial sorting. */ class SortedWindowFilter : public SlidingWindowFilter { public: using SlidingWindowFilter::SlidingWindowFilter; protected: - /// Helper to get sorted non-NaN values from the window + /// Helper to get non-NaN values from the window (not sorted - caller will use nth_element) /// Returns empty FixedVector if all values are NaN - FixedVector get_sorted_values_(); + FixedVector get_window_values_(); }; /** Simple quantile filter. @@ -317,15 +317,28 @@ class MultiplyFilter : public Filter { TemplatableValue multiplier_; }; +/** Base class for filters that compare sensor values against a list of configured values. + * + * This base class provides common functionality for filters that need to check if a sensor + * value matches any value in a configured list, with proper handling of NaN values and + * accuracy-based rounding for comparisons. + */ +class ValueListFilter : public Filter { + protected: + explicit ValueListFilter(std::initializer_list> values); + + /// Check if sensor value matches any configured value (with accuracy rounding) + bool value_matches_any_(float sensor_value); + + FixedVector> values_; +}; + /// A simple filter that only forwards the filter chain if it doesn't receive `value_to_filter_out`. -class FilterOutValueFilter : public Filter { +class FilterOutValueFilter : public ValueListFilter { public: - explicit FilterOutValueFilter(std::vector> values_to_filter_out); + explicit FilterOutValueFilter(std::initializer_list> values_to_filter_out); optional new_value(float value) override; - - protected: - std::vector> values_to_filter_out_; }; class ThrottleFilter : public Filter { @@ -340,17 +353,16 @@ class ThrottleFilter : public Filter { }; /// Same as 'throttle' but will immediately publish values contained in `value_to_prioritize`. 
-class ThrottleWithPriorityFilter : public Filter { +class ThrottleWithPriorityFilter : public ValueListFilter { public: explicit ThrottleWithPriorityFilter(uint32_t min_time_between_inputs, - std::vector> prioritized_values); + std::initializer_list> prioritized_values); optional new_value(float value) override; protected: uint32_t last_input_{0}; uint32_t min_time_between_inputs_; - std::vector> prioritized_values_; }; class TimeoutFilter : public Filter, public Component { @@ -384,15 +396,16 @@ class HeartbeatFilter : public Filter, public Component { explicit HeartbeatFilter(uint32_t time_period); void setup() override; - optional new_value(float value) override; - float get_setup_priority() const override; + void set_optimistic(bool optimistic) { this->optimistic_ = optimistic; } + protected: uint32_t time_period_; float last_input_; bool has_value_{false}; + bool optimistic_{false}; }; class DeltaFilter : public Filter { @@ -410,7 +423,7 @@ class DeltaFilter : public Filter { class OrFilter : public Filter { public: - explicit OrFilter(std::vector filters); + explicit OrFilter(std::initializer_list filters); void initialize(Sensor *parent, Filter *next) override; @@ -426,28 +439,27 @@ class OrFilter : public Filter { OrFilter *or_parent_; }; - std::vector filters_; + FixedVector filters_; PhiNode phi_; bool has_value_{false}; }; class CalibrateLinearFilter : public Filter { public: - CalibrateLinearFilter(std::vector> linear_functions) - : linear_functions_(std::move(linear_functions)) {} + explicit CalibrateLinearFilter(std::initializer_list> linear_functions); optional new_value(float value) override; protected: - std::vector> linear_functions_; + FixedVector> linear_functions_; }; class CalibratePolynomialFilter : public Filter { public: - CalibratePolynomialFilter(std::vector coefficients) : coefficients_(std::move(coefficients)) {} + explicit CalibratePolynomialFilter(std::initializer_list coefficients); optional new_value(float value) override; protected: - std::vector coefficients_; + FixedVector coefficients_; }; class ClampFilter : public Filter { diff --git a/esphome/components/sensor/sensor.cpp b/esphome/components/sensor/sensor.cpp index 4292b8c0bc..92da4345b7 100644 --- a/esphome/components/sensor/sensor.cpp +++ b/esphome/components/sensor/sensor.cpp @@ -107,12 +107,12 @@ void Sensor::add_filter(Filter *filter) { } filter->initialize(this, nullptr); } -void Sensor::add_filters(const std::vector &filters) { +void Sensor::add_filters(std::initializer_list filters) { for (Filter *filter : filters) { this->add_filter(filter); } } -void Sensor::set_filters(const std::vector &filters) { +void Sensor::set_filters(std::initializer_list filters) { this->clear_filters(); this->add_filters(filters); } diff --git a/esphome/components/sensor/sensor.h b/esphome/components/sensor/sensor.h index f3fa601a5e..a4210e5e6c 100644 --- a/esphome/components/sensor/sensor.h +++ b/esphome/components/sensor/sensor.h @@ -6,7 +6,7 @@ #include "esphome/core/log.h" #include "esphome/components/sensor/filter.h" -#include +#include #include namespace esphome { @@ -77,10 +77,10 @@ class Sensor : public EntityBase, public EntityBase_DeviceClass, public EntityBa * SlidingWindowMovingAverageFilter(15, 15), // average over last 15 values * }); */ - void add_filters(const std::vector &filters); + void add_filters(std::initializer_list filters); /// Clear the filters and replace them by filters. 
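Editor's aside on the set_filters/add_filters signature change above (not part of the patch): taking std::initializer_list<Filter *> lets the generated setup code pass a braced list of filter pointers directly, with no temporary heap-allocated std::vector. A minimal sketch of the consuming side, where FilterChain is an illustrative type, not ESPHome API:

#include <initializer_list>

struct Filter;  // opaque in this sketch; only pointers are handled

struct FilterChain {
  void add_filter(Filter *f) { /* link f into the chain */ }
  void add_filters(std::initializer_list<Filter *> filters) {
    for (Filter *f : filters)
      this->add_filter(f);
  }
};

// Call-site shape as emitted by codegen:  chain.add_filters({filter_a, filter_b});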
- void set_filters(const std::vector &filters); + void set_filters(std::initializer_list filters); /// Clear the entire filter chain. void clear_filters(); diff --git a/esphome/components/socket/__init__.py b/esphome/components/socket/__init__.py index e085a09eac..e6a4cfc07f 100644 --- a/esphome/components/socket/__init__.py +++ b/esphome/components/socket/__init__.py @@ -1,3 +1,5 @@ +from collections.abc import Callable, MutableMapping + import esphome.codegen as cg import esphome.config_validation as cv from esphome.core import CORE @@ -9,6 +11,32 @@ IMPLEMENTATION_LWIP_TCP = "lwip_tcp" IMPLEMENTATION_LWIP_SOCKETS = "lwip_sockets" IMPLEMENTATION_BSD_SOCKETS = "bsd_sockets" +# Socket tracking infrastructure +# Components register their socket needs and platforms read this to configure appropriately +KEY_SOCKET_CONSUMERS = "socket_consumers" + + +def consume_sockets( + value: int, consumer: str +) -> Callable[[MutableMapping], MutableMapping]: + """Register socket usage for a component. + + Args: + value: Number of sockets needed by the component + consumer: Name of the component consuming the sockets + + Returns: + A validator function that records the socket usage + """ + + def _consume_sockets(config: MutableMapping) -> MutableMapping: + consumers: dict[str, int] = CORE.data.setdefault(KEY_SOCKET_CONSUMERS, {}) + consumers[consumer] = consumers.get(consumer, 0) + value + return config + + return _consume_sockets + + CONFIG_SCHEMA = cv.Schema( { cv.SplitDefault( diff --git a/esphome/components/speed/fan/speed_fan.h b/esphome/components/speed/fan/speed_fan.h index 6537bce3f6..454b7fc136 100644 --- a/esphome/components/speed/fan/speed_fan.h +++ b/esphome/components/speed/fan/speed_fan.h @@ -18,7 +18,7 @@ class SpeedFan : public Component, public fan::Fan { void set_output(output::FloatOutput *output) { this->output_ = output; } void set_oscillating(output::BinaryOutput *oscillating) { this->oscillating_ = oscillating; } void set_direction(output::BinaryOutput *direction) { this->direction_ = direction; } - void set_preset_modes(const std::set &presets) { this->preset_modes_ = presets; } + void set_preset_modes(const std::vector &presets) { this->preset_modes_ = presets; } fan::FanTraits get_traits() override { return this->traits_; } protected: @@ -30,7 +30,7 @@ class SpeedFan : public Component, public fan::Fan { output::BinaryOutput *direction_{nullptr}; int speed_count_{}; fan::FanTraits traits_; - std::set preset_modes_{}; + std::vector preset_modes_{}; }; } // namespace speed diff --git a/esphome/components/statsd/statsd.h b/esphome/components/statsd/statsd.h index 34f84cbe00..eab77a7a6e 100644 --- a/esphome/components/statsd/statsd.h +++ b/esphome/components/statsd/statsd.h @@ -28,21 +28,6 @@ namespace esphome { namespace statsd { -using sensor_type_t = enum { TYPE_SENSOR, TYPE_BINARY_SENSOR }; - -using sensors_t = struct { - const char *name; - sensor_type_t type; - union { -#ifdef USE_SENSOR - esphome::sensor::Sensor *sensor; -#endif -#ifdef USE_BINARY_SENSOR - esphome::binary_sensor::BinarySensor *binary_sensor; -#endif - }; -}; - class StatsdComponent : public PollingComponent { public: ~StatsdComponent(); @@ -71,6 +56,20 @@ class StatsdComponent : public PollingComponent { const char *prefix_; uint16_t port_; + using sensor_type_t = enum { TYPE_SENSOR, TYPE_BINARY_SENSOR }; + using sensors_t = struct { + const char *name; + sensor_type_t type; + union { +#ifdef USE_SENSOR + esphome::sensor::Sensor *sensor; +#endif +#ifdef USE_BINARY_SENSOR + esphome::binary_sensor::BinarySensor 
*binary_sensor; +#endif + }; + }; + std::vector sensors_; #ifdef USE_ESP8266 diff --git a/esphome/components/substitutions/__init__.py b/esphome/components/substitutions/__init__.py index e6bcdc063a..098d56bfad 100644 --- a/esphome/components/substitutions/__init__.py +++ b/esphome/components/substitutions/__init__.py @@ -6,7 +6,7 @@ import esphome.config_validation as cv from esphome.const import CONF_SUBSTITUTIONS, VALID_SUBSTITUTIONS_CHARACTERS from esphome.yaml_util import ESPHomeDataBase, ESPLiteralValue, make_data_base -from .jinja import Jinja, JinjaStr, TemplateError, TemplateRuntimeError, has_jinja +from .jinja import Jinja, JinjaError, JinjaStr, has_jinja CODEOWNERS = ["@esphome/core"] _LOGGER = logging.getLogger(__name__) @@ -57,17 +57,12 @@ def _expand_jinja(value, orig_value, path, jinja, ignore_missing): "->".join(str(x) for x in path), err.message, ) - except ( - TemplateError, - TemplateRuntimeError, - RuntimeError, - ArithmeticError, - AttributeError, - TypeError, - ) as err: + except JinjaError as err: raise cv.Invalid( - f"{type(err).__name__} Error evaluating jinja expression '{value}': {str(err)}." - f" See {'->'.join(str(x) for x in path)}", + f"{err.error_name()} Error evaluating jinja expression '{value}': {str(err.parent())}." + f"\nEvaluation stack: (most recent evaluation last)\n{err.stack_trace_str()}" + f"\nRelevant context:\n{err.context_trace_str()}" + f"\nSee {'->'.join(str(x) for x in path)}", path, ) return value diff --git a/esphome/components/substitutions/jinja.py b/esphome/components/substitutions/jinja.py index e7164d8fff..dde0162993 100644 --- a/esphome/components/substitutions/jinja.py +++ b/esphome/components/substitutions/jinja.py @@ -6,6 +6,8 @@ import re import jinja2 as jinja from jinja2.sandbox import SandboxedEnvironment +from esphome.yaml_util import ESPLiteralValue + TemplateError = jinja.TemplateError TemplateSyntaxError = jinja.TemplateSyntaxError TemplateRuntimeError = jinja.TemplateRuntimeError @@ -26,18 +28,20 @@ def has_jinja(st): return detect_jinja_re.search(st) is not None -# SAFE_GLOBAL_FUNCTIONS defines a allowlist of built-in functions that are considered safe to expose +# SAFE_GLOBALS defines a allowlist of built-in functions or modules that are considered safe to expose # in Jinja templates or other sandboxed evaluation contexts. Only functions that do not allow # arbitrary code execution, file access, or other security risks are included. # # The following functions are considered safe: +# - math: The entire math module is injected, allowing access to mathematical functions like sin, cos, sqrt, etc. # - ord: Converts a character to its Unicode code point integer. # - chr: Converts an integer to its corresponding Unicode character. # - len: Returns the length of a sequence or collection. # # These functions were chosen because they are pure, have no side effects, and do not provide access # to the file system, environment, or other potentially sensitive resources. -SAFE_GLOBAL_FUNCTIONS = { +SAFE_GLOBALS = { + "math": math, # Inject entire math module "ord": ord, "chr": chr, "len": len, @@ -56,22 +60,62 @@ class JinjaStr(str): later in the main substitutions pass. 
""" + Undefined = object() + def __new__(cls, value: str, upvalues=None): - obj = super().__new__(cls, value) - obj.upvalues = upvalues or {} + if isinstance(value, JinjaStr): + base = str(value) + merged = {**value.upvalues, **(upvalues or {})} + else: + base = value + merged = dict(upvalues or {}) + obj = super().__new__(cls, base) + obj.upvalues = merged + obj.result = JinjaStr.Undefined return obj - def __init__(self, value: str, upvalues=None): - self.upvalues = upvalues or {} + +class JinjaError(Exception): + def __init__(self, context_trace: dict, expr: str): + self.context_trace = context_trace + self.eval_stack = [expr] + + def parent(self): + return self.__context__ + + def error_name(self): + return type(self.parent()).__name__ + + def context_trace_str(self): + return "\n".join( + f" {k} = {repr(v)} ({type(v).__name__})" + for k, v in self.context_trace.items() + ) + + def stack_trace_str(self): + return "\n".join( + f" {len(self.eval_stack) - i}: {expr}{i == 0 and ' <-- ' + self.error_name() or ''}" + for i, expr in enumerate(self.eval_stack) + ) -class Jinja: +class TrackerContext(jinja.runtime.Context): + def resolve_or_missing(self, key): + val = super().resolve_or_missing(key) + if isinstance(val, JinjaStr): + self.environment.context_trace[key] = val + val, _ = self.environment.expand(val) + self.environment.context_trace[key] = val + return val + + +class Jinja(SandboxedEnvironment): """ Wraps a Jinja environment """ def __init__(self, context_vars): - self.env = SandboxedEnvironment( + super().__init__( trim_blocks=True, lstrip_blocks=True, block_start_string="<%", @@ -82,13 +126,20 @@ class Jinja: variable_end_string="}", undefined=jinja.StrictUndefined, ) - self.env.add_extension("jinja2.ext.do") - self.env.globals["math"] = math # Inject entire math module + self.context_class = TrackerContext + self.add_extension("jinja2.ext.do") + self.context_trace = {} self.context_vars = {**context_vars} - self.env.globals = { - **self.env.globals, + for k, v in self.context_vars.items(): + if isinstance(v, ESPLiteralValue): + continue + if isinstance(v, str) and not isinstance(v, JinjaStr) and has_jinja(v): + self.context_vars[k] = JinjaStr(v, self.context_vars) + + self.globals = { + **self.globals, **self.context_vars, - **SAFE_GLOBAL_FUNCTIONS, + **SAFE_GLOBALS, } def safe_eval(self, expr): @@ -110,23 +161,43 @@ class Jinja: result = None override_vars = {} if isinstance(content_str, JinjaStr): + if content_str.result is not JinjaStr.Undefined: + return content_str.result, None # If `value` is already a JinjaStr, it means we are trying to evaluate it again # in a parent pass. # Hopefully, all required variables are visible now. override_vars = content_str.upvalues + + old_trace = self.context_trace + self.context_trace = {} try: - template = self.env.from_string(content_str) + template = self.from_string(content_str) result = self.safe_eval(template.render(override_vars)) if isinstance(result, Undefined): - # This happens when the expression is simply an undefined variable. Jinja does not - # raise an exception, instead we get "Undefined". - # Trigger an UndefinedError exception so we skip to below. - print("" + result) + print("" + result) # force a UndefinedError exception except (TemplateSyntaxError, UndefinedError) as err: # `content_str` contains a Jinja expression that refers to a variable that is undefined # in this scope. Perhaps it refers to a root substitution that is not visible yet. 
- # Therefore, return the original `content_str` as a JinjaStr, which contains the variables + # Therefore, return `content_str` as a JinjaStr, which contains the variables # that are actually visible to it at this point to postpone evaluation. return JinjaStr(content_str, {**self.context_vars, **override_vars}), err + except JinjaError as err: + err.context_trace = {**self.context_trace, **err.context_trace} + err.eval_stack.append(content_str) + raise err + except ( + TemplateError, + TemplateRuntimeError, + RuntimeError, + ArithmeticError, + AttributeError, + TypeError, + ) as err: + raise JinjaError(self.context_trace, content_str) from err + finally: + self.context_trace = old_trace + + if isinstance(content_str, JinjaStr): + content_str.result = result return result, None diff --git a/esphome/components/template/fan/template_fan.h b/esphome/components/template/fan/template_fan.h index 7f5305ca48..5d780f61f0 100644 --- a/esphome/components/template/fan/template_fan.h +++ b/esphome/components/template/fan/template_fan.h @@ -1,6 +1,6 @@ #pragma once -#include +#include #include "esphome/core/component.h" #include "esphome/components/fan/fan.h" @@ -16,7 +16,7 @@ class TemplateFan : public Component, public fan::Fan { void set_has_direction(bool has_direction) { this->has_direction_ = has_direction; } void set_has_oscillating(bool has_oscillating) { this->has_oscillating_ = has_oscillating; } void set_speed_count(int count) { this->speed_count_ = count; } - void set_preset_modes(const std::set &presets) { this->preset_modes_ = presets; } + void set_preset_modes(const std::initializer_list &presets) { this->preset_modes_ = presets; } fan::FanTraits get_traits() override { return this->traits_; } protected: @@ -26,7 +26,7 @@ class TemplateFan : public Component, public fan::Fan { bool has_direction_{false}; int speed_count_{0}; fan::FanTraits traits_; - std::set preset_modes_{}; + std::vector preset_modes_{}; }; } // namespace template_ diff --git a/esphome/components/text_sensor/__init__.py b/esphome/components/text_sensor/__init__.py index f7b3b5c55e..7a9e947abd 100644 --- a/esphome/components/text_sensor/__init__.py +++ b/esphome/components/text_sensor/__init__.py @@ -110,17 +110,28 @@ def validate_mapping(value): "substitute", SubstituteFilter, cv.ensure_list(validate_mapping) ) async def substitute_filter_to_code(config, filter_id): - from_strings = [conf[CONF_FROM] for conf in config] - to_strings = [conf[CONF_TO] for conf in config] - return cg.new_Pvariable(filter_id, from_strings, to_strings) + substitutions = [ + cg.StructInitializer( + cg.MockObj("Substitution", "esphome::text_sensor::"), + ("from", conf[CONF_FROM]), + ("to", conf[CONF_TO]), + ) + for conf in config + ] + return cg.new_Pvariable(filter_id, substitutions) @FILTER_REGISTRY.register("map", MapFilter, cv.ensure_list(validate_mapping)) async def map_filter_to_code(config, filter_id): - map_ = cg.std_ns.class_("map").template(cg.std_string, cg.std_string) - return cg.new_Pvariable( - filter_id, map_([(item[CONF_FROM], item[CONF_TO]) for item in config]) - ) + mappings = [ + cg.StructInitializer( + cg.MockObj("Substitution", "esphome::text_sensor::"), + ("from", conf[CONF_FROM]), + ("to", conf[CONF_TO]), + ) + for conf in config + ] + return cg.new_Pvariable(filter_id, mappings) validate_device_class = cv.one_of(*DEVICE_CLASSES, lower=True, space="_") diff --git a/esphome/components/text_sensor/filter.cpp b/esphome/components/text_sensor/filter.cpp index 80edae2b6c..a242b43b1c 100644 --- 
a/esphome/components/text_sensor/filter.cpp +++ b/esphome/components/text_sensor/filter.cpp @@ -62,19 +62,27 @@ optional AppendFilter::new_value(std::string value) { return value optional PrependFilter::new_value(std::string value) { return this->prefix_ + value; } // Substitute +SubstituteFilter::SubstituteFilter(const std::initializer_list &substitutions) + : substitutions_(substitutions) {} + optional SubstituteFilter::new_value(std::string value) { std::size_t pos; - for (size_t i = 0; i < this->from_strings_.size(); i++) { - while ((pos = value.find(this->from_strings_[i])) != std::string::npos) - value.replace(pos, this->from_strings_[i].size(), this->to_strings_[i]); + for (const auto &sub : this->substitutions_) { + while ((pos = value.find(sub.from)) != std::string::npos) + value.replace(pos, sub.from.size(), sub.to); } return value; } // Map +MapFilter::MapFilter(const std::initializer_list &mappings) : mappings_(mappings) {} + optional MapFilter::new_value(std::string value) { - auto item = mappings_.find(value); - return item == mappings_.end() ? value : item->second; + for (const auto &mapping : this->mappings_) { + if (mapping.from == value) + return mapping.to; + } + return value; // Pass through if no match } } // namespace text_sensor diff --git a/esphome/components/text_sensor/filter.h b/esphome/components/text_sensor/filter.h index 2de9010b88..c77c221235 100644 --- a/esphome/components/text_sensor/filter.h +++ b/esphome/components/text_sensor/filter.h @@ -2,10 +2,6 @@ #include "esphome/core/component.h" #include "esphome/core/helpers.h" -#include -#include -#include -#include namespace esphome { namespace text_sensor { @@ -98,26 +94,52 @@ class PrependFilter : public Filter { std::string prefix_; }; +struct Substitution { + std::string from; + std::string to; +}; + /// A simple filter that replaces a substring with another substring class SubstituteFilter : public Filter { public: - SubstituteFilter(std::vector from_strings, std::vector to_strings) - : from_strings_(std::move(from_strings)), to_strings_(std::move(to_strings)) {} + explicit SubstituteFilter(const std::initializer_list &substitutions); optional new_value(std::string value) override; protected: - std::vector from_strings_; - std::vector to_strings_; + FixedVector substitutions_; }; -/// A filter that maps values from one set to another +/** A filter that maps values from one set to another + * + * Uses linear search instead of std::map for typical small datasets (2-20 mappings). 
+ * Linear search on contiguous memory is faster than red-black tree lookups when: + * - Dataset is small (< ~30 items) + * - Memory is contiguous (cache-friendly, better CPU cache utilization) + * - No pointer chasing overhead (tree node traversal) + * - String comparison cost dominates lookup time + * + * Benchmark results (see benchmark_map_filter.cpp): + * - 2 mappings: Linear 1.26x faster than std::map + * - 5 mappings: Linear 2.25x faster than std::map + * - 10 mappings: Linear 1.83x faster than std::map + * - 20 mappings: Linear 1.59x faster than std::map + * - 30 mappings: Linear 1.09x faster than std::map + * - 40 mappings: std::map 1.27x faster than Linear (break-even) + * + * Benefits over std::map: + * - ~2KB smaller flash (no red-black tree code) + * - ~24-32 bytes less RAM per mapping (no tree node overhead) + * - Faster for typical ESPHome usage (2-10 mappings common, 20+ rare) + * + * Break-even point: ~35-40 mappings, but ESPHome configs rarely exceed 20 + */ class MapFilter : public Filter { public: - MapFilter(std::map mappings) : mappings_(std::move(mappings)) {} + explicit MapFilter(const std::initializer_list &mappings); optional new_value(std::string value) override; protected: - std::map mappings_; + FixedVector mappings_; }; } // namespace text_sensor diff --git a/esphome/components/text_sensor/text_sensor.cpp b/esphome/components/text_sensor/text_sensor.cpp index 17bf20466e..0294d65861 100644 --- a/esphome/components/text_sensor/text_sensor.cpp +++ b/esphome/components/text_sensor/text_sensor.cpp @@ -51,12 +51,12 @@ void TextSensor::add_filter(Filter *filter) { } filter->initialize(this, nullptr); } -void TextSensor::add_filters(const std::vector &filters) { +void TextSensor::add_filters(std::initializer_list filters) { for (Filter *filter : filters) { this->add_filter(filter); } } -void TextSensor::set_filters(const std::vector &filters) { +void TextSensor::set_filters(std::initializer_list filters) { this->clear_filters(); this->add_filters(filters); } diff --git a/esphome/components/text_sensor/text_sensor.h b/esphome/components/text_sensor/text_sensor.h index abbea27b59..db2e857ae3 100644 --- a/esphome/components/text_sensor/text_sensor.h +++ b/esphome/components/text_sensor/text_sensor.h @@ -5,7 +5,7 @@ #include "esphome/core/helpers.h" #include "esphome/components/text_sensor/filter.h" -#include +#include #include namespace esphome { @@ -37,10 +37,10 @@ class TextSensor : public EntityBase, public EntityBase_DeviceClass { void add_filter(Filter *filter); /// Add a list of vectors to the back of the filter chain. - void add_filters(const std::vector &filters); + void add_filters(std::initializer_list filters); /// Clear the filters and replace them by filters. - void set_filters(const std::vector &filters); + void set_filters(std::initializer_list filters); /// Clear the entire filter chain. 
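Editor's aside on the MapFilter change above (not part of the patch): with the small mapping counts typical of ESPHome configs, a linear scan over a contiguous array of from/to pairs beats a std::map lookup, as the benchmark comment notes. A standalone sketch of that lookup, with plain std::vector standing in for FixedVector:

#include <string>
#include <vector>

struct Substitution {
  std::string from;
  std::string to;
};

// Returns the mapped value, or the input unchanged when no mapping matches.
static std::string map_value(const std::vector<Substitution> &mappings, const std::string &value) {
  for (const auto &m : mappings) {
    if (m.from == value)
      return m.to;
  }
  return value;
}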
void clear_filters(); diff --git a/esphome/components/tuya/climate/tuya_climate.cpp b/esphome/components/tuya/climate/tuya_climate.cpp index 7827a4e3ab..04fb14acff 100644 --- a/esphome/components/tuya/climate/tuya_climate.cpp +++ b/esphome/components/tuya/climate/tuya_climate.cpp @@ -283,8 +283,11 @@ void TuyaClimate::control_fan_mode_(const climate::ClimateCall &call) { climate::ClimateTraits TuyaClimate::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_action(true); - traits.set_supports_current_temperature(this->current_temperature_id_.has_value()); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION); + if (this->current_temperature_id_.has_value()) { + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); + } + if (supports_heat_) traits.add_supported_mode(climate::CLIMATE_MODE_HEAT); if (supports_cool_) diff --git a/esphome/components/uart/uart_component_esp8266.cpp b/esphome/components/uart/uart_component_esp8266.cpp index b2bf2bacf1..7a453dbb50 100644 --- a/esphome/components/uart/uart_component_esp8266.cpp +++ b/esphome/components/uart/uart_component_esp8266.cpp @@ -56,6 +56,13 @@ uint32_t ESP8266UartComponent::get_config() { } void ESP8266UartComponent::setup() { + if (this->rx_pin_) { + this->rx_pin_->setup(); + } + if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) { + this->tx_pin_->setup(); + } + // Use Arduino HardwareSerial UARTs if all used pins match the ones // preconfigured by the platform. For example if RX disabled but TX pin // is 1 we still want to use Serial. diff --git a/esphome/components/uart/uart_component_esp_idf.cpp b/esphome/components/uart/uart_component_esp_idf.cpp index 7530856b1e..cffa3308eb 100644 --- a/esphome/components/uart/uart_component_esp_idf.cpp +++ b/esphome/components/uart/uart_component_esp_idf.cpp @@ -6,6 +6,9 @@ #include "esphome/core/defines.h" #include "esphome/core/helpers.h" #include "esphome/core/log.h" +#include "esphome/core/gpio.h" +#include "driver/gpio.h" +#include "soc/gpio_num.h" #ifdef USE_LOGGER #include "esphome/components/logger/logger.h" @@ -104,6 +107,13 @@ void IDFUARTComponent::load_settings(bool dump_config) { return; } + if (this->rx_pin_) { + this->rx_pin_->setup(); + } + if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) { + this->tx_pin_->setup(); + } + int8_t tx = this->tx_pin_ != nullptr ? this->tx_pin_->get_pin() : -1; int8_t rx = this->rx_pin_ != nullptr ? this->rx_pin_->get_pin() : -1; int8_t flow_control = this->flow_control_pin_ != nullptr ? this->flow_control_pin_->get_pin() : -1; diff --git a/esphome/components/uart/uart_component_libretiny.cpp b/esphome/components/uart/uart_component_libretiny.cpp index 8a7a301cfe..9c065fe5df 100644 --- a/esphome/components/uart/uart_component_libretiny.cpp +++ b/esphome/components/uart/uart_component_libretiny.cpp @@ -46,6 +46,13 @@ uint16_t LibreTinyUARTComponent::get_config() { } void LibreTinyUARTComponent::setup() { + if (this->rx_pin_) { + this->rx_pin_->setup(); + } + if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) { + this->tx_pin_->setup(); + } + int8_t tx_pin = tx_pin_ == nullptr ? -1 : tx_pin_->get_pin(); int8_t rx_pin = rx_pin_ == nullptr ? 
-1 : rx_pin_->get_pin(); bool tx_inverted = tx_pin_ != nullptr && tx_pin_->is_inverted(); diff --git a/esphome/components/uart/uart_component_rp2040.cpp b/esphome/components/uart/uart_component_rp2040.cpp index ae3042fb77..c78691653d 100644 --- a/esphome/components/uart/uart_component_rp2040.cpp +++ b/esphome/components/uart/uart_component_rp2040.cpp @@ -52,6 +52,13 @@ uint16_t RP2040UartComponent::get_config() { } void RP2040UartComponent::setup() { + if (this->rx_pin_) { + this->rx_pin_->setup(); + } + if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) { + this->tx_pin_->setup(); + } + uint16_t config = get_config(); constexpr uint32_t valid_tx_uart_0 = __bitset({0, 12, 16, 28}); diff --git a/esphome/components/uponor_smatrix/__init__.py b/esphome/components/uponor_smatrix/__init__.py index d4102d1026..9588b0df7f 100644 --- a/esphome/components/uponor_smatrix/__init__.py +++ b/esphome/components/uponor_smatrix/__init__.py @@ -17,6 +17,12 @@ UponorSmatrixDevice = uponor_smatrix_ns.class_( "UponorSmatrixDevice", cg.Parented.template(UponorSmatrixComponent) ) + +device_address = cv.All( + cv.hex_int, + cv.Range(min=0x1000000, max=0xFFFFFFFF, msg="Expected a 32 bit device address"), +) + CONF_UPONOR_SMATRIX_ID = "uponor_smatrix_id" CONF_TIME_DEVICE_ADDRESS = "time_device_address" @@ -24,9 +30,12 @@ CONFIG_SCHEMA = ( cv.Schema( { cv.GenerateID(): cv.declare_id(UponorSmatrixComponent), - cv.Optional(CONF_ADDRESS): cv.hex_uint16_t, + cv.Optional(CONF_ADDRESS): cv.invalid( + f"The '{CONF_ADDRESS}' option has been removed. " + "Use full 32 bit addresses in the device definitions instead." + ), cv.Optional(CONF_TIME_ID): cv.use_id(time.RealTimeClock), - cv.Optional(CONF_TIME_DEVICE_ADDRESS): cv.hex_uint16_t, + cv.Optional(CONF_TIME_DEVICE_ADDRESS): device_address, } ) .extend(cv.COMPONENT_SCHEMA) @@ -47,7 +56,7 @@ FINAL_VALIDATE_SCHEMA = uart.final_validate_device_schema( UPONOR_SMATRIX_DEVICE_SCHEMA = cv.Schema( { cv.GenerateID(CONF_UPONOR_SMATRIX_ID): cv.use_id(UponorSmatrixComponent), - cv.Required(CONF_ADDRESS): cv.hex_uint16_t, + cv.Required(CONF_ADDRESS): device_address, } ) @@ -58,17 +67,15 @@ async def to_code(config): await cg.register_component(var, config) await uart.register_uart_device(var, config) - if address := config.get(CONF_ADDRESS): - cg.add(var.set_system_address(address)) if time_id := config.get(CONF_TIME_ID): time_ = await cg.get_variable(time_id) cg.add(var.set_time_id(time_)) - if time_device_address := config.get(CONF_TIME_DEVICE_ADDRESS): - cg.add(var.set_time_device_address(time_device_address)) + if time_device_address := config.get(CONF_TIME_DEVICE_ADDRESS): + cg.add(var.set_time_device_address(time_device_address)) async def register_uponor_smatrix_device(var, config): parent = await cg.get_variable(config[CONF_UPONOR_SMATRIX_ID]) cg.add(var.set_parent(parent)) - cg.add(var.set_device_address(config[CONF_ADDRESS])) + cg.add(var.set_address(config[CONF_ADDRESS])) cg.add(parent.register_device(var)) diff --git a/esphome/components/uponor_smatrix/climate/uponor_smatrix_climate.cpp b/esphome/components/uponor_smatrix/climate/uponor_smatrix_climate.cpp index 19a9112c73..4256b01c4e 100644 --- a/esphome/components/uponor_smatrix/climate/uponor_smatrix_climate.cpp +++ b/esphome/components/uponor_smatrix/climate/uponor_smatrix_climate.cpp @@ -10,7 +10,7 @@ static const char *const TAG = "uponor_smatrix.climate"; void UponorSmatrixClimate::dump_config() { LOG_CLIMATE("", "Uponor Smatrix Climate", this); - ESP_LOGCONFIG(TAG, " Device address: 0x%04X", this->address_); + 
ESP_LOGCONFIG(TAG, " Device address: 0x%08X", this->address_); } void UponorSmatrixClimate::loop() { @@ -30,10 +30,9 @@ void UponorSmatrixClimate::loop() { climate::ClimateTraits UponorSmatrixClimate::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(true); - traits.set_supports_current_humidity(true); + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY | + climate::CLIMATE_SUPPORTS_ACTION); traits.set_supported_modes({climate::CLIMATE_MODE_HEAT}); - traits.set_supports_action(true); traits.set_supported_presets({climate::CLIMATE_PRESET_ECO}); traits.set_visual_min_temperature(this->min_temperature_); traits.set_visual_max_temperature(this->max_temperature_); diff --git a/esphome/components/uponor_smatrix/sensor/uponor_smatrix_sensor.cpp b/esphome/components/uponor_smatrix/sensor/uponor_smatrix_sensor.cpp index a1d0db214f..7ee12edcdb 100644 --- a/esphome/components/uponor_smatrix/sensor/uponor_smatrix_sensor.cpp +++ b/esphome/components/uponor_smatrix/sensor/uponor_smatrix_sensor.cpp @@ -9,7 +9,7 @@ static const char *const TAG = "uponor_smatrix.sensor"; void UponorSmatrixSensor::dump_config() { ESP_LOGCONFIG(TAG, "Uponor Smatrix Sensor\n" - " Device address: 0x%04X", + " Device address: 0x%08X", this->address_); LOG_SENSOR(" ", "Temperature", this->temperature_sensor_); LOG_SENSOR(" ", "External Temperature", this->external_temperature_sensor_); diff --git a/esphome/components/uponor_smatrix/uponor_smatrix.cpp b/esphome/components/uponor_smatrix/uponor_smatrix.cpp index 867305059f..221f07c80e 100644 --- a/esphome/components/uponor_smatrix/uponor_smatrix.cpp +++ b/esphome/components/uponor_smatrix/uponor_smatrix.cpp @@ -18,11 +18,10 @@ void UponorSmatrixComponent::setup() { void UponorSmatrixComponent::dump_config() { ESP_LOGCONFIG(TAG, "Uponor Smatrix"); - ESP_LOGCONFIG(TAG, " System address: 0x%04X", this->address_); #ifdef USE_TIME if (this->time_id_ != nullptr) { ESP_LOGCONFIG(TAG, " Time synchronization: YES"); - ESP_LOGCONFIG(TAG, " Time master device address: 0x%04X", this->time_device_address_); + ESP_LOGCONFIG(TAG, " Time master device address: 0x%08X", this->time_device_address_); } #endif @@ -31,7 +30,7 @@ void UponorSmatrixComponent::dump_config() { if (!this->unknown_devices_.empty()) { ESP_LOGCONFIG(TAG, " Detected unknown device addresses:"); for (auto device_address : this->unknown_devices_) { - ESP_LOGCONFIG(TAG, " 0x%04X", device_address); + ESP_LOGCONFIG(TAG, " 0x%08X", device_address); } } } @@ -89,8 +88,7 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) { return false; } - uint16_t system_address = encode_uint16(packet[0], packet[1]); - uint16_t device_address = encode_uint16(packet[2], packet[3]); + uint32_t device_address = encode_uint32(packet[0], packet[1], packet[2], packet[3]); uint16_t crc = encode_uint16(packet[packet_len - 1], packet[packet_len - 2]); uint16_t computed_crc = crc16(packet, packet_len - 2); @@ -99,24 +97,14 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) { return false; } - ESP_LOGV(TAG, "Received packet: sys=%04X, dev=%04X, data=%s, crc=%04X", system_address, device_address, + ESP_LOGV(TAG, "Received packet: addr=%08X, data=%s, crc=%04X", device_address, format_hex(&packet[4], packet_len - 6).c_str(), crc); - // Detect or check system address - if (this->address_ == 0) { - ESP_LOGI(TAG, "Using detected system address 0x%04X", system_address); - this->address_ = system_address; - } else if (this->address_ != 
system_address) { - // This should never happen except if the system address was set or detected incorrectly, so warn the user. - ESP_LOGW(TAG, "Received packet from unknown system address 0x%04X", system_address); - return true; - } - // Handle packet size_t data_len = (packet_len - 6) / 3; if (data_len == 0) { if (packet[4] == UPONOR_ID_REQUEST) - ESP_LOGVV(TAG, "Ignoring request packet for device 0x%04X", device_address); + ESP_LOGVV(TAG, "Ignoring request packet for device 0x%08X", device_address); return true; } @@ -141,7 +129,7 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) { if (data[i].id == UPONOR_ID_DATETIME1) found_time = true; if (found_temperature && found_time) { - ESP_LOGI(TAG, "Using detected time device address 0x%04X", device_address); + ESP_LOGI(TAG, "Using detected time device address 0x%08X", device_address); this->time_device_address_ = device_address; break; } @@ -160,7 +148,7 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) { // Log unknown device addresses if (!found && !this->unknown_devices_.count(device_address)) { - ESP_LOGI(TAG, "Received packet for unknown device address 0x%04X ", device_address); + ESP_LOGI(TAG, "Received packet for unknown device address 0x%08X ", device_address); this->unknown_devices_.insert(device_address); } @@ -168,16 +156,16 @@ bool UponorSmatrixComponent::parse_byte_(uint8_t byte) { return true; } -bool UponorSmatrixComponent::send(uint16_t device_address, const UponorSmatrixData *data, size_t data_len) { - if (this->address_ == 0 || device_address == 0 || data == nullptr || data_len == 0) +bool UponorSmatrixComponent::send(uint32_t device_address, const UponorSmatrixData *data, size_t data_len) { + if (device_address == 0 || data == nullptr || data_len == 0) return false; // Assemble packet for send queue. All fields are big-endian except for the little-endian checksum. 
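Editor's aside on the addressing change above (not part of the patch): the former 16-bit system + 16-bit device address pair becomes a single 32-bit device address, serialized big-endian as the first four packet bytes and read back via encode_uint32 on the receive path. A standalone sketch of that framing; the helper names are illustrative only:

#include <cstdint>
#include <vector>

static void append_address_be(std::vector<uint8_t> &packet, uint32_t device_address) {
  packet.push_back(static_cast<uint8_t>(device_address >> 24));
  packet.push_back(static_cast<uint8_t>(device_address >> 16));
  packet.push_back(static_cast<uint8_t>(device_address >> 8));
  packet.push_back(static_cast<uint8_t>(device_address >> 0));
}

static uint32_t read_address_be(const uint8_t *p) {
  return (static_cast<uint32_t>(p[0]) << 24) | (static_cast<uint32_t>(p[1]) << 16) |
         (static_cast<uint32_t>(p[2]) << 8) | static_cast<uint32_t>(p[3]);
}

This four-byte layout is also why the dump_config() and log lines in this diff switch from the 0x%04X format to 0x%08X.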
std::vector packet; packet.reserve(6 + 3 * data_len); - packet.push_back(this->address_ >> 8); - packet.push_back(this->address_ >> 0); + packet.push_back(device_address >> 24); + packet.push_back(device_address >> 16); packet.push_back(device_address >> 8); packet.push_back(device_address >> 0); diff --git a/esphome/components/uponor_smatrix/uponor_smatrix.h b/esphome/components/uponor_smatrix/uponor_smatrix.h index e3e19a12fc..bd760f0d77 100644 --- a/esphome/components/uponor_smatrix/uponor_smatrix.h +++ b/esphome/components/uponor_smatrix/uponor_smatrix.h @@ -71,23 +71,21 @@ class UponorSmatrixComponent : public uart::UARTDevice, public Component { void dump_config() override; void loop() override; - void set_system_address(uint16_t address) { this->address_ = address; } void register_device(UponorSmatrixDevice *device) { this->devices_.push_back(device); } - bool send(uint16_t device_address, const UponorSmatrixData *data, size_t data_len); + bool send(uint32_t device_address, const UponorSmatrixData *data, size_t data_len); #ifdef USE_TIME void set_time_id(time::RealTimeClock *time_id) { this->time_id_ = time_id; } - void set_time_device_address(uint16_t address) { this->time_device_address_ = address; } + void set_time_device_address(uint32_t address) { this->time_device_address_ = address; } void send_time() { this->send_time_requested_ = true; } #endif protected: bool parse_byte_(uint8_t byte); - uint16_t address_; std::vector devices_; - std::set unknown_devices_; + std::set unknown_devices_; std::vector rx_buffer_; std::queue> tx_queue_; @@ -96,7 +94,7 @@ class UponorSmatrixComponent : public uart::UARTDevice, public Component { #ifdef USE_TIME time::RealTimeClock *time_id_{nullptr}; - uint16_t time_device_address_; + uint32_t time_device_address_; bool send_time_requested_; bool do_send_time_(); #endif @@ -104,7 +102,7 @@ class UponorSmatrixComponent : public uart::UARTDevice, public Component { class UponorSmatrixDevice : public Parented { public: - void set_device_address(uint16_t address) { this->address_ = address; } + void set_address(uint32_t address) { this->address_ = address; } virtual void on_device_data(const UponorSmatrixData *data, size_t data_len) = 0; bool send(const UponorSmatrixData *data, size_t data_len) { @@ -113,7 +111,7 @@ class UponorSmatrixDevice : public Parented { protected: friend UponorSmatrixComponent; - uint16_t address_; + uint32_t address_; }; inline float raw_to_celsius(uint16_t raw) { diff --git a/esphome/components/web_server/__init__.py b/esphome/components/web_server/__init__.py index 288d928e80..a7fdf30eef 100644 --- a/esphome/components/web_server/__init__.py +++ b/esphome/components/web_server/__init__.py @@ -136,6 +136,18 @@ def _final_validate_sorting(config: ConfigType) -> ConfigType: FINAL_VALIDATE_SCHEMA = _final_validate_sorting + +def _consume_web_server_sockets(config: ConfigType) -> ConfigType: + """Register socket needs for web_server component.""" + from esphome.components import socket + + # Web server needs 1 listening socket + typically 2 concurrent client connections + # (browser makes 2 connections for page + event stream) + sockets_needed = 3 + socket.consume_sockets(sockets_needed, "web_server")(config) + return config + + sorting_group = { cv.Required(CONF_ID): cv.declare_id(cg.int_), cv.Required(CONF_NAME): cv.string, @@ -205,6 +217,7 @@ CONFIG_SCHEMA = cv.All( validate_local, validate_sorting_groups, validate_ota, + _consume_web_server_sockets, ) diff --git a/esphome/components/wifi/__init__.py 
b/esphome/components/wifi/__init__.py index 494470cb48..ba488728b7 100644 --- a/esphome/components/wifi/__init__.py +++ b/esphome/components/wifi/__init__.py @@ -213,11 +213,15 @@ def _validate(config): if CONF_EAP in config: network[CONF_EAP] = config.pop(CONF_EAP) if CONF_NETWORKS in config: - raise cv.Invalid( - "You cannot use the 'ssid:' option together with 'networks:'. Please " - "copy your network into the 'networks:' key" - ) - config[CONF_NETWORKS] = cv.ensure_list(WIFI_NETWORK_STA)(network) + # In testing mode, merged component tests may have both ssid and networks + # Just use the networks list and ignore the single ssid + if not CORE.testing_mode: + raise cv.Invalid( + "You cannot use the 'ssid:' option together with 'networks:'. Please " + "copy your network into the 'networks:' key" + ) + else: + config[CONF_NETWORKS] = cv.ensure_list(WIFI_NETWORK_STA)(network) if (CONF_NETWORKS not in config) and (CONF_AP not in config): config = config.copy() @@ -378,14 +382,19 @@ async def to_code(config): # Track if any network uses Enterprise authentication has_eap = False - def add_sta(ap, network): - ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP)) - cg.add(var.add_sta(wifi_network(network, ap, ip_config))) + # Initialize FixedVector with the count of networks + networks = config.get(CONF_NETWORKS, []) + if networks: + cg.add(var.init_sta(len(networks))) - for network in config.get(CONF_NETWORKS, []): - if CONF_EAP in network: - has_eap = True - cg.with_local_variable(network[CONF_ID], WiFiAP(), add_sta, network) + def add_sta(ap: cg.MockObj, network: dict) -> None: + ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP)) + cg.add(var.add_sta(wifi_network(network, ap, ip_config))) + + for network in networks: + if CONF_EAP in network: + has_eap = True + cg.with_local_variable(network[CONF_ID], WiFiAP(), add_sta, network) if CONF_AP in config: conf = config[CONF_AP] diff --git a/esphome/components/wifi/wifi_component.cpp b/esphome/components/wifi/wifi_component.cpp index c0e3069d74..b278e5a386 100644 --- a/esphome/components/wifi/wifi_component.cpp +++ b/esphome/components/wifi/wifi_component.cpp @@ -330,9 +330,11 @@ float WiFiComponent::get_loop_priority() const { return 10.0f; // before other loop components } +void WiFiComponent::init_sta(size_t count) { this->sta_.init(count); } void WiFiComponent::add_sta(const WiFiAP &ap) { this->sta_.push_back(ap); } void WiFiComponent::set_sta(const WiFiAP &ap) { this->clear_sta(); + this->init_sta(1); this->add_sta(ap); } void WiFiComponent::clear_sta() { this->sta_.clear(); } @@ -650,8 +652,10 @@ void WiFiComponent::check_scanning_finished() { // selected network is hidden, we use the data from the config selected.set_hidden(true); selected.set_ssid(config.get_ssid()); - // don't set BSSID and channel, there might be multiple hidden networks + // Clear channel and BSSID for hidden networks - there might be multiple hidden networks // but we can't know which one is the correct one. Rely on probe-req with just SSID. 
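// Illustration (not from the upstream sources): with init_sta() added above, the
// generated setup code is expected to pre-size the FixedVector once and then append
// each configured network, roughly:
//
//   wifi->init_sta(2);     // one-time allocation for two configured networks
//   wifi->add_sta(ap_0);   // WiFiAP objects built by the Python codegen, in order
//   wifi->add_sta(ap_1);
//
// set_sta() keeps working for the single-network case by calling clear_sta(),
// init_sta(1) and add_sta() itself, as shown in the hunk above. The exact variable
// names in the generated main.cpp are assumptions; only the call pattern is implied
// by this patch.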
+ selected.set_channel(0); + selected.set_bssid(optional{}); } else { // selected network is visible, we use the data from the scan // limit the connect params to only connect to exactly this network diff --git a/esphome/components/wifi/wifi_component.h b/esphome/components/wifi/wifi_component.h index 10aa82a065..42f78dbfac 100644 --- a/esphome/components/wifi/wifi_component.h +++ b/esphome/components/wifi/wifi_component.h @@ -219,6 +219,7 @@ class WiFiComponent : public Component { void set_sta(const WiFiAP &ap); WiFiAP get_sta() { return this->selected_ap_; } + void init_sta(size_t count); void add_sta(const WiFiAP &ap); void clear_sta(); @@ -393,7 +394,7 @@ class WiFiComponent : public Component { #endif std::string use_address_; - std::vector sta_; + FixedVector sta_; std::vector sta_priorities_; wifi_scan_vector_t scan_result_; WiFiAP selected_ap_; diff --git a/esphome/components/wifi/wifi_component_esp_idf.cpp b/esphome/components/wifi/wifi_component_esp_idf.cpp index a483e893e9..08ecba3598 100644 --- a/esphome/components/wifi/wifi_component_esp_idf.cpp +++ b/esphome/components/wifi/wifi_component_esp_idf.cpp @@ -776,13 +776,12 @@ void WiFiComponent::wifi_process_event_(IDFWiFiEvent *data) { } uint16_t number = it.number; - std::vector records(number); - err = esp_wifi_scan_get_ap_records(&number, records.data()); + auto records = std::make_unique(number); + err = esp_wifi_scan_get_ap_records(&number, records.get()); if (err != ESP_OK) { ESP_LOGW(TAG, "esp_wifi_scan_get_ap_records failed: %s", esp_err_to_name(err)); return; } - records.resize(number); scan_result_.init(number); for (int i = 0; i < number; i++) { diff --git a/esphome/components/wled/wled_light_effect.cpp b/esphome/components/wled/wled_light_effect.cpp index 25577ccc11..d26b7a1750 100644 --- a/esphome/components/wled/wled_light_effect.cpp +++ b/esphome/components/wled/wled_light_effect.cpp @@ -28,7 +28,7 @@ const int DEFAULT_BLANK_TIME = 1000; static const char *const TAG = "wled_light_effect"; -WLEDLightEffect::WLEDLightEffect(const std::string &name) : AddressableLightEffect(name) {} +WLEDLightEffect::WLEDLightEffect(const char *name) : AddressableLightEffect(name) {} void WLEDLightEffect::start() { AddressableLightEffect::start(); diff --git a/esphome/components/wled/wled_light_effect.h b/esphome/components/wled/wled_light_effect.h index a591e1fd1a..6da5f4e9f9 100644 --- a/esphome/components/wled/wled_light_effect.h +++ b/esphome/components/wled/wled_light_effect.h @@ -15,7 +15,7 @@ namespace wled { class WLEDLightEffect : public light::AddressableLightEffect { public: - WLEDLightEffect(const std::string &name); + WLEDLightEffect(const char *name); void start() override; void stop() override; diff --git a/esphome/components/yashima/yashima.cpp b/esphome/components/yashima/yashima.cpp index a3cf53ff66..bf91420620 100644 --- a/esphome/components/yashima/yashima.cpp +++ b/esphome/components/yashima/yashima.cpp @@ -81,7 +81,9 @@ const uint32_t YASHIMA_CARRIER_FREQUENCY = 38000; climate::ClimateTraits YashimaClimate::traits() { auto traits = climate::ClimateTraits(); - traits.set_supports_current_temperature(this->sensor_ != nullptr); + if (this->sensor_ != nullptr) { + traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE); + } traits.set_supported_modes({climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT_COOL}); if (supports_cool_) @@ -89,7 +91,6 @@ climate::ClimateTraits YashimaClimate::traits() { if (supports_heat_) traits.add_supported_mode(climate::CLIMATE_MODE_HEAT); - 
traits.set_supports_two_point_target_temperature(false); traits.set_visual_min_temperature(YASHIMA_TEMP_MIN); traits.set_visual_max_temperature(YASHIMA_TEMP_MAX); traits.set_visual_temperature_step(1); diff --git a/esphome/components/zephyr_ble_server/__init__.py b/esphome/components/zephyr_ble_server/__init__.py new file mode 100644 index 0000000000..211941e984 --- /dev/null +++ b/esphome/components/zephyr_ble_server/__init__.py @@ -0,0 +1,34 @@ +import esphome.codegen as cg +from esphome.components.zephyr import zephyr_add_prj_conf +import esphome.config_validation as cv +from esphome.const import CONF_ESPHOME, CONF_ID, CONF_NAME, Framework +import esphome.final_validate as fv + +zephyr_ble_server_ns = cg.esphome_ns.namespace("zephyr_ble_server") +BLEServer = zephyr_ble_server_ns.class_("BLEServer", cg.Component) + +CONFIG_SCHEMA = cv.All( + cv.Schema( + { + cv.GenerateID(): cv.declare_id(BLEServer), + } + ).extend(cv.COMPONENT_SCHEMA), + cv.only_with_framework(Framework.ZEPHYR), +) + + +def _final_validate(_): + full_config = fv.full_config.get() + zephyr_add_prj_conf("BT_DEVICE_NAME", full_config[CONF_ESPHOME][CONF_NAME]) + + +FINAL_VALIDATE_SCHEMA = _final_validate + + +async def to_code(config): + var = cg.new_Pvariable(config[CONF_ID]) + zephyr_add_prj_conf("BT", True) + zephyr_add_prj_conf("BT_PERIPHERAL", True) + zephyr_add_prj_conf("BT_RX_STACK_SIZE", 1536) + # zephyr_add_prj_conf("BT_LL_SW_SPLIT", True) + await cg.register_component(var, config) diff --git a/esphome/components/zephyr_ble_server/ble_server.cpp b/esphome/components/zephyr_ble_server/ble_server.cpp new file mode 100644 index 0000000000..9f7e606a90 --- /dev/null +++ b/esphome/components/zephyr_ble_server/ble_server.cpp @@ -0,0 +1,100 @@ +#ifdef USE_ZEPHYR +#include "ble_server.h" +#include "esphome/core/defines.h" +#include "esphome/core/log.h" +#include +#include + +namespace esphome::zephyr_ble_server { + +static const char *const TAG = "zephyr_ble_server"; + +static struct k_work advertise_work; // NOLINT(cppcoreguidelines-avoid-non-const-global-variables) + +#define DEVICE_NAME CONFIG_BT_DEVICE_NAME +#define DEVICE_NAME_LEN (sizeof(DEVICE_NAME) - 1) + +static const struct bt_data AD[] = { + BT_DATA_BYTES(BT_DATA_FLAGS, (BT_LE_AD_GENERAL | BT_LE_AD_NO_BREDR)), + BT_DATA(BT_DATA_NAME_COMPLETE, DEVICE_NAME, DEVICE_NAME_LEN), +}; + +static const struct bt_data SD[] = { +#ifdef USE_OTA + BT_DATA_BYTES(BT_DATA_UUID128_ALL, 0x84, 0xaa, 0x60, 0x74, 0x52, 0x8a, 0x8b, 0x86, 0xd3, 0x4c, 0xb7, 0x1d, 0x1d, + 0xdc, 0x53, 0x8d), +#endif +}; + +const struct bt_le_adv_param *const ADV_PARAM = BT_LE_ADV_CONN; + +static void advertise(struct k_work *work) { + int rc = bt_le_adv_stop(); + if (rc) { + ESP_LOGE(TAG, "Advertising failed to stop (rc %d)", rc); + } + + rc = bt_le_adv_start(ADV_PARAM, AD, ARRAY_SIZE(AD), SD, ARRAY_SIZE(SD)); + if (rc) { + ESP_LOGE(TAG, "Advertising failed to start (rc %d)", rc); + return; + } + ESP_LOGI(TAG, "Advertising successfully started"); +} + +static void connected(struct bt_conn *conn, uint8_t err) { + if (err) { + ESP_LOGE(TAG, "Connection failed (err 0x%02x)", err); + } else { + ESP_LOGI(TAG, "Connected"); + } +} + +static void disconnected(struct bt_conn *conn, uint8_t reason) { + ESP_LOGI(TAG, "Disconnected (reason 0x%02x)", reason); + k_work_submit(&advertise_work); +} + +static void bt_ready(int err) { + if (err != 0) { + ESP_LOGE(TAG, "Bluetooth failed to initialise: %d", err); + } else { + k_work_submit(&advertise_work); + } +} + +BT_CONN_CB_DEFINE(conn_callbacks) = { + .connected = 
connected, + .disconnected = disconnected, +}; + +void BLEServer::setup() { + k_work_init(&advertise_work, advertise); + resume_(); +} + +void BLEServer::loop() { + if (this->suspended_) { + resume_(); + this->suspended_ = false; + } +} + +void BLEServer::resume_() { + int rc = bt_enable(bt_ready); + if (rc != 0) { + ESP_LOGE(TAG, "Bluetooth enable failed: %d", rc); + return; + } +} + +void BLEServer::on_shutdown() { + struct k_work_sync sync; + k_work_cancel_sync(&advertise_work, &sync); + bt_disable(); + this->suspended_ = true; +} + +} // namespace esphome::zephyr_ble_server + +#endif diff --git a/esphome/components/zephyr_ble_server/ble_server.h b/esphome/components/zephyr_ble_server/ble_server.h new file mode 100644 index 0000000000..1b32e9b58c --- /dev/null +++ b/esphome/components/zephyr_ble_server/ble_server.h @@ -0,0 +1,19 @@ +#pragma once +#ifdef USE_ZEPHYR +#include "esphome/core/component.h" + +namespace esphome::zephyr_ble_server { + +class BLEServer : public Component { + public: + void setup() override; + void loop() override; + void on_shutdown() override; + + protected: + void resume_(); + bool suspended_ = false; +}; + +} // namespace esphome::zephyr_ble_server +#endif diff --git a/esphome/config.py b/esphome/config.py index 6adecb5c65..634dba8dad 100644 --- a/esphome/config.py +++ b/esphome/config.py @@ -12,7 +12,7 @@ from typing import Any import voluptuous as vol from esphome import core, loader, pins, yaml_util -from esphome.config_helpers import Extend, Remove, merge_dicts_ordered +from esphome.config_helpers import Extend, Remove, merge_config, merge_dicts_ordered import esphome.config_validation as cv from esphome.const import ( CONF_ESPHOME, @@ -324,13 +324,7 @@ def iter_ids(config, path=None): yield from iter_ids(value, path + [key]) -def recursive_check_replaceme(value): - if isinstance(value, list): - return cv.Schema([recursive_check_replaceme])(value) - if isinstance(value, dict): - return cv.Schema({cv.valid: recursive_check_replaceme})(value) - if isinstance(value, ESPLiteralValue): - pass +def check_replaceme(value): if isinstance(value, str) and value == "REPLACEME": raise cv.Invalid( "Found 'REPLACEME' in configuration, this is most likely an error. 
" @@ -339,7 +333,86 @@ def recursive_check_replaceme(value): "If you want to use the literal REPLACEME string, " 'please use "!literal REPLACEME"' ) - return value + + +def _build_list_index(lst): + index = OrderedDict() + extensions, removals = [], set() + for item in lst: + if item is None: + removals.add(None) + continue + item_id = None + if isinstance(item, dict) and (item_id := item.get(CONF_ID)): + if isinstance(item_id, Extend): + extensions.append(item) + continue + if isinstance(item_id, Remove): + removals.add(item_id.value) + continue + if not item_id or item_id in index: + # no id or duplicate -> pass through with identity-based key + item_id = id(item) + index[item_id] = item + return index, extensions, removals + + +def resolve_extend_remove(value, is_key=None): + if isinstance(value, ESPLiteralValue): + return # do not check inside literal blocks + if isinstance(value, list): + index, extensions, removals = _build_list_index(value) + if extensions or removals: + # Rebuild the original list after + # processing all extensions and removals + for item in extensions: + item_id = item[CONF_ID].value + if item_id in removals: + continue + old = index.get(item_id) + if old is None: + # Failed to find source for extension + # Find index of item to show error at correct position + i = next( + ( + i + for i, d in enumerate(value) + if d.get(CONF_ID) == item[CONF_ID] + ) + ) + with cv.prepend_path(i): + raise cv.Invalid( + f"Source for extension of ID '{item_id}' was not found." + ) + item[CONF_ID] = item_id + index[item_id] = merge_config(old, item) + for item_id in removals: + index.pop(item_id, None) + + value[:] = index.values() + + for i, item in enumerate(value): + with cv.prepend_path(i): + resolve_extend_remove(item, False) + return + if isinstance(value, dict): + removals = [] + for k, v in value.items(): + with cv.prepend_path(k): + if isinstance(v, Remove): + removals.append(k) + continue + resolve_extend_remove(k, True) + resolve_extend_remove(v, False) + for k in removals: + value.pop(k, None) + return + if is_key: + return # do not check keys (yet) + + check_replaceme(value) + + return class ConfigValidationStep(abc.ABC): @@ -437,19 +510,6 @@ class LoadValidationStep(ConfigValidationStep): continue p_name = p_config.get("platform") if p_name is None: - p_id = p_config.get(CONF_ID) - if isinstance(p_id, Extend): - result.add_str_error( - f"Source for extension of ID '{p_id.value}' was not found.", - path + [CONF_ID], - ) - continue - if isinstance(p_id, Remove): - result.add_str_error( - f"Source for removal of ID '{p_id.value}' was not found.", - path + [CONF_ID], - ) - continue result.add_str_error( f"'{self.domain}' requires a 'platform' key but it was not specified.", path, @@ -934,9 +994,10 @@ def validate_config( CORE.raw_config = config - # 1.1. Check for REPLACEME special value + # 1.1. 
Resolve !extend and !remove and check for REPLACEME + # After this step, there will not be any Extend or Remove values in the config anymore try: - recursive_check_replaceme(config) + resolve_extend_remove(config) except vol.Invalid as err: result.add_error(err) diff --git a/esphome/config_helpers.py b/esphome/config_helpers.py index 88cfa49fdc..c0a3b99968 100644 --- a/esphome/config_helpers.py +++ b/esphome/config_helpers.py @@ -1,7 +1,6 @@ from collections.abc import Callable from esphome.const import ( - CONF_ID, CONF_LEVEL, CONF_LOGGER, KEY_CORE, @@ -75,73 +74,28 @@ class Remove: return isinstance(b, Remove) and self.value == b.value -def merge_config(full_old, full_new): - def merge(old, new): - if isinstance(new, dict): - if not isinstance(old, dict): - return new - # Preserve OrderedDict type by copying to OrderedDict if either input is OrderedDict - if isinstance(old, OrderedDict) or isinstance(new, OrderedDict): - res = OrderedDict(old) - else: - res = old.copy() - for k, v in new.items(): - if isinstance(v, Remove) and k in old: - del res[k] - else: - res[k] = merge(old[k], v) if k in old else v - return res - if isinstance(new, list): - if not isinstance(old, list): - return new - res = old.copy() - ids = { - v_id: i - for i, v in enumerate(res) - if isinstance(v, dict) - and (v_id := v.get(CONF_ID)) - and isinstance(v_id, str) - } - extend_ids = { - v_id.value: i - for i, v in enumerate(res) - if isinstance(v, dict) - and (v_id := v.get(CONF_ID)) - and isinstance(v_id, Extend) - } - - ids_to_delete = [] - for v in new: - if isinstance(v, dict) and (new_id := v.get(CONF_ID)): - if isinstance(new_id, Extend): - new_id = new_id.value - if new_id in ids: - v[CONF_ID] = new_id - res[ids[new_id]] = merge(res[ids[new_id]], v) - continue - elif isinstance(new_id, Remove): - new_id = new_id.value - if new_id in ids: - ids_to_delete.append(ids[new_id]) - continue - elif ( - new_id in extend_ids - ): # When a package is extending a non-packaged item - extend_res = res[extend_ids[new_id]] - extend_res[CONF_ID] = new_id - new_v = merge(v, extend_res) - res[extend_ids[new_id]] = new_v - continue - else: - ids[new_id] = len(res) - res.append(v) - return [v for i, v in enumerate(res) if i not in ids_to_delete] - if new is None: - return old - +def merge_config(old, new): + if isinstance(new, Remove): return new + if isinstance(new, dict): + if not isinstance(old, dict): + return new + # Preserve OrderedDict type by copying to OrderedDict if either input is OrderedDict + if isinstance(old, OrderedDict) or isinstance(new, OrderedDict): + res = OrderedDict(old) + else: + res = old.copy() + for k, v in new.items(): + res[k] = merge_config(old.get(k), v) + return res + if isinstance(new, list): + if not isinstance(old, list): + return new + return old + new + if new is None: + return old - return merge(full_old, full_new) + return new def filter_source_files_from_platform( diff --git a/esphome/config_validation.py b/esphome/config_validation.py index e2f0b835c9..c613a984c4 100644 --- a/esphome/config_validation.py +++ b/esphome/config_validation.py @@ -24,7 +24,6 @@ import voluptuous as vol from esphome import core import esphome.codegen as cg -from esphome.config_helpers import Extend, Remove from esphome.const import ( ALLOWED_NAME_CHARS, CONF_AVAILABILITY, @@ -624,12 +623,6 @@ def declare_id(type): if value is None: return core.ID(None, is_declaration=True, type=type) - if isinstance(value, Extend): - raise Invalid(f"Source for extension of ID '{value.value}' was not found.") - - if 
isinstance(value, Remove): - raise Invalid(f"Source for Removal of ID '{value.value}' was not found.") - return core.ID(validate_id_name(value), is_declaration=True, type=type) return validator diff --git a/esphome/const.py b/esphome/const.py index ce1c033e41..3bbc6b8b3f 100644 --- a/esphome/const.py +++ b/esphome/const.py @@ -471,6 +471,7 @@ CONF_IMPORT_REACTIVE_ENERGY = "import_reactive_energy" CONF_INC_PIN = "inc_pin" CONF_INCLUDE_INTERNAL = "include_internal" CONF_INCLUDES = "includes" +CONF_INCLUDES_C = "includes_c" CONF_INDEX = "index" CONF_INDOOR = "indoor" CONF_INFRARED = "infrared" diff --git a/esphome/core/__init__.py b/esphome/core/__init__.py index a3efcf69a9..e1f1f688f0 100644 --- a/esphome/core/__init__.py +++ b/esphome/core/__init__.py @@ -636,11 +636,9 @@ class EsphomeCore: if self.config is None: raise ValueError("Config has not been loaded yet") - if CONF_WIFI in self.config: - return self.config[CONF_WIFI][CONF_USE_ADDRESS] - - if CONF_ETHERNET in self.config: - return self.config[CONF_ETHERNET][CONF_USE_ADDRESS] + for network_type in (CONF_WIFI, CONF_ETHERNET, CONF_OPENTHREAD): + if network_type in self.config: + return self.config[network_type][CONF_USE_ADDRESS] if CONF_OPENTHREAD in self.config: return f"{self.name}.local" diff --git a/esphome/core/application.h b/esphome/core/application.h index 6e7f1b49f2..29a734f000 100644 --- a/esphome/core/application.h +++ b/esphome/core/application.h @@ -39,7 +39,7 @@ #include "esphome/components/text_sensor/text_sensor.h" #endif #ifdef USE_FAN -#include "esphome/components/fan/fan_state.h" +#include "esphome/components/fan/fan.h" #endif #ifdef USE_CLIMATE #include "esphome/components/climate/climate.h" diff --git a/esphome/core/automation.h b/esphome/core/automation.h index e156818312..0512752d50 100644 --- a/esphome/core/automation.h +++ b/esphome/core/automation.h @@ -243,7 +243,7 @@ template class ActionList { } this->actions_end_ = action; } - void add_actions(const std::vector *> &actions) { + void add_actions(const std::initializer_list *> &actions) { for (auto *action : actions) { this->add_action(action); } @@ -286,7 +286,7 @@ template class Automation { explicit Automation(Trigger *trigger) : trigger_(trigger) { this->trigger_->set_automation_parent(this); } void add_action(Action *action) { this->actions_.add_action(action); } - void add_actions(const std::vector *> &actions) { this->actions_.add_actions(actions); } + void add_actions(const std::initializer_list *> &actions) { this->actions_.add_actions(actions); } void stop() { this->actions_.stop(); } diff --git a/esphome/core/base_automation.h b/esphome/core/base_automation.h index f1248e0035..af8cde971b 100644 --- a/esphome/core/base_automation.h +++ b/esphome/core/base_automation.h @@ -194,12 +194,12 @@ template class IfAction : public Action { public: explicit IfAction(Condition *condition) : condition_(condition) {} - void add_then(const std::vector *> &actions) { + void add_then(const std::initializer_list *> &actions) { this->then_.add_actions(actions); this->then_.add_action(new LambdaAction([this](Ts... x) { this->play_next_(x...); })); } - void add_else(const std::vector *> &actions) { + void add_else(const std::initializer_list *> &actions) { this->else_.add_actions(actions); this->else_.add_action(new LambdaAction([this](Ts... 
x) { this->play_next_(x...); })); } @@ -240,7 +240,7 @@ template class WhileAction : public Action { public: WhileAction(Condition *condition) : condition_(condition) {} - void add_then(const std::vector *> &actions) { + void add_then(const std::initializer_list *> &actions) { this->then_.add_actions(actions); this->then_.add_action(new LambdaAction([this](Ts... x) { if (this->num_running_ > 0 && this->condition_->check_tuple(this->var_)) { @@ -287,7 +287,7 @@ template class RepeatAction : public Action { public: TEMPLATABLE_VALUE(uint32_t, count) - void add_then(const std::vector *> &actions) { + void add_then(const std::initializer_list *> &actions) { this->then_.add_actions(actions); this->then_.add_action(new LambdaAction([this](uint32_t iteration, Ts... x) { iteration++; diff --git a/esphome/core/color.h b/esphome/core/color.h index 5dce58a485..4b0ae5b57a 100644 --- a/esphome/core/color.h +++ b/esphome/core/color.h @@ -14,6 +14,15 @@ inline static constexpr uint8_t esp_scale8(uint8_t i, uint8_t scale) { return (uint16_t(i) * (1 + uint16_t(scale))) / 256; } +/// Scale an 8-bit value by two 8-bit scale factors with improved precision. +/// This is more accurate than calling esp_scale8() twice because it delays +/// truncation until after both multiplications, preserving intermediate precision. +/// For example: esp_scale8_twice(value, max_brightness, local_brightness) +/// gives better results than esp_scale8(esp_scale8(value, max_brightness), local_brightness) +inline static constexpr uint8_t esp_scale8_twice(uint8_t i, uint8_t scale1, uint8_t scale2) { + return (uint32_t(i) * (1 + uint32_t(scale1)) * (1 + uint32_t(scale2))) >> 16; +} + struct Color { union { struct { diff --git a/esphome/core/config.py b/esphome/core/config.py index 8a5876dbcf..2740453808 100644 --- a/esphome/core/config.py +++ b/esphome/core/config.py @@ -21,6 +21,7 @@ from esphome.const import ( CONF_FRIENDLY_NAME, CONF_ID, CONF_INCLUDES, + CONF_INCLUDES_C, CONF_LIBRARIES, CONF_MIN_VERSION, CONF_NAME, @@ -227,6 +228,7 @@ CONFIG_SCHEMA = cv.All( } ), cv.Optional(CONF_INCLUDES, default=[]): cv.ensure_list(valid_include), + cv.Optional(CONF_INCLUDES_C, default=[]): cv.ensure_list(valid_include), cv.Optional(CONF_LIBRARIES, default=[]): cv.ensure_list(cv.string_strict), cv.Optional(CONF_NAME_ADD_MAC_SUFFIX, default=False): cv.boolean, cv.Optional(CONF_DEBUG_SCHEDULER, default=False): cv.boolean, @@ -302,6 +304,17 @@ def _list_target_platforms(): return target_platforms +def _sort_includes_by_type(includes: list[str]) -> tuple[list[str], list[str]]: + system_includes = [] + other_includes = [] + for include in includes: + if include.startswith("<") and include.endswith(">"): + system_includes.append(include) + else: + other_includes.append(include) + return system_includes, other_includes + + def preload_core_config(config, result) -> str: with cv.prepend_path(CONF_ESPHOME): conf = PRELOAD_CONFIG_SCHEMA(config[CONF_ESPHOME]) @@ -339,7 +352,7 @@ def preload_core_config(config, result) -> str: return target_platforms[0] -def include_file(path: Path, basename: Path): +def include_file(path: Path, basename: Path, is_c_header: bool = False): parts = basename.parts dst = CORE.relative_src_path(*parts) copy_file_if_changed(path, dst) @@ -347,7 +360,14 @@ def include_file(path: Path, basename: Path): ext = path.suffix if ext in [".h", ".hpp", ".tcc"]: # Header, add include statement - cg.add_global(cg.RawStatement(f'#include "{basename}"')) + if is_c_header: + # Wrap in extern "C" block for C headers + cg.add_global( + 
cg.RawStatement(f'extern "C" {{\n #include "{basename}"\n}}') + ) + else: + # Regular include + cg.add_global(cg.RawStatement(f'#include "{basename}"')) ARDUINO_GLUE_CODE = """\ @@ -377,7 +397,7 @@ async def add_arduino_global_workaround(): @coroutine_with_priority(CoroPriority.FINAL) -async def add_includes(includes: list[str]) -> None: +async def add_includes(includes: list[str], is_c_header: bool = False) -> None: # Add includes at the very end, so that the included files can access global variables for include in includes: path = CORE.relative_config_path(include) @@ -385,11 +405,11 @@ async def add_includes(includes: list[str]) -> None: # Directory, copy tree for p in walk_files(path): basename = p.relative_to(path.parent) - include_file(p, basename) + include_file(p, basename, is_c_header) else: # Copy file basename = Path(path.name) - include_file(path, basename) + include_file(path, basename, is_c_header) @coroutine_with_priority(CoroPriority.FINAL) @@ -494,19 +514,25 @@ async def to_code(config: ConfigType) -> None: CORE.add_job(add_arduino_global_workaround) if config[CONF_INCLUDES]: - # Get the <...> includes - system_includes = [] - other_includes = [] - for include in config[CONF_INCLUDES]: - if include.startswith("<") and include.endswith(">"): - system_includes.append(include) - else: - other_includes.append(include) + system_includes, other_includes = _sort_includes_by_type(config[CONF_INCLUDES]) # <...> includes should be at the start for include in system_includes: cg.add_global(cg.RawStatement(f"#include {include}"), prepend=True) # Other includes should be at the end - CORE.add_job(add_includes, other_includes) + CORE.add_job(add_includes, other_includes, False) + + if config[CONF_INCLUDES_C]: + system_includes, other_includes = _sort_includes_by_type( + config[CONF_INCLUDES_C] + ) + # <...> includes should be at the start + for include in system_includes: + cg.add_global( + cg.RawStatement(f'extern "C" {{\n #include {include}\n}}'), + prepend=True, + ) + # Other includes should be at the end + CORE.add_job(add_includes, other_includes, True) if project_conf := config.get(CONF_PROJECT): cg.add_define("ESPHOME_PROJECT_NAME", project_conf[CONF_NAME]) diff --git a/esphome/core/controller.h b/esphome/core/controller.h index 1a5b9ea6b4..b475e326ee 100644 --- a/esphome/core/controller.h +++ b/esphome/core/controller.h @@ -5,7 +5,7 @@ #include "esphome/components/binary_sensor/binary_sensor.h" #endif #ifdef USE_FAN -#include "esphome/components/fan/fan_state.h" +#include "esphome/components/fan/fan.h" #endif #ifdef USE_LIGHT #include "esphome/components/light/light_state.h" diff --git a/esphome/core/defines.h b/esphome/core/defines.h index b1bd7f92d7..39698c1004 100644 --- a/esphome/core/defines.h +++ b/esphome/core/defines.h @@ -44,6 +44,7 @@ #define USE_GRAPHICAL_DISPLAY_MENU #define USE_HOMEASSISTANT_TIME #define USE_HTTP_REQUEST_OTA_WATCHDOG_TIMEOUT 8000 // NOLINT +#define USE_IMPROV_SERIAL_NEXT_URL #define USE_JSON #define USE_LIGHT #define USE_LOCK @@ -186,6 +187,7 @@ #define USE_ESP32_CAMERA_JPEG_ENCODER #define USE_I2C #define USE_IMPROV +#define USE_ESP32_IMPROV_NEXT_URL #define USE_MICROPHONE #define USE_PSRAM #define USE_SOCKET_IMPL_BSD_SOCKETS @@ -241,8 +243,10 @@ // Dummy firmware payload for shelly_dimmer #define USE_SHD_FIRMWARE_MAJOR_VERSION 56 #define USE_SHD_FIRMWARE_MINOR_VERSION 5 +// clang-format off #define USE_SHD_FIRMWARE_DATA \ {} +// clang-format on #define USE_WEBSERVER #define USE_WEBSERVER_AUTH @@ -273,6 +277,8 @@ #ifdef USE_NRF52 #define 
USE_NRF52_DFU +#define USE_SOFTDEVICE_ID 7 +#define USE_SOFTDEVICE_VERSION 1 #endif // Disabled feature flags diff --git a/esphome/core/finite_set_mask.h b/esphome/core/finite_set_mask.h new file mode 100644 index 0000000000..f9cd0377c7 --- /dev/null +++ b/esphome/core/finite_set_mask.h @@ -0,0 +1,171 @@ +#pragma once + +#include +#include +#include +#include +#include + +namespace esphome { + +/// Default bit mapping policy for contiguous enums starting at 0 +/// Provides 1:1 mapping where enum value equals bit position +template struct DefaultBitPolicy { + // Automatic bitmask type selection based on MaxBits + // ≤8 bits: uint8_t, ≤16 bits: uint16_t, otherwise: uint32_t + using mask_t = typename std::conditional<(MaxBits <= 8), uint8_t, + typename std::conditional<(MaxBits <= 16), uint16_t, uint32_t>::type>::type; + + static constexpr int MAX_BITS = MaxBits; + + static constexpr unsigned to_bit(ValueType value) { return static_cast(value); } + + static constexpr ValueType from_bit(unsigned bit) { return static_cast(bit); } +}; + +/// Generic bitmask for storing a finite set of discrete values efficiently. +/// Replaces std::set to eliminate red-black tree overhead (~586 bytes per instantiation). +/// +/// Template parameters: +/// ValueType: The type to store (typically enum, but can be any discrete bounded type) +/// BitPolicy: Policy class defining bit mapping and mask type (defaults to DefaultBitPolicy) +/// +/// BitPolicy requirements: +/// - using mask_t = // Bitmask storage type +/// - static constexpr int MAX_BITS // Maximum number of bits +/// - static constexpr unsigned to_bit(ValueType) // Convert value to bit position +/// - static constexpr ValueType from_bit(unsigned) // Convert bit position to value +/// +/// Example usage (1:1 mapping - climate enums): +/// // For contiguous enums starting at 0, use DefaultBitPolicy +/// using ClimateModeMask = FiniteSetMask>; +/// ClimateModeMask modes({CLIMATE_MODE_HEAT, CLIMATE_MODE_COOL}); +/// if (modes.count(CLIMATE_MODE_HEAT)) { ... } +/// for (auto mode : modes) { ... 
} +/// +/// Example usage (custom mapping - ColorMode): +/// // For custom mappings, define a custom BitPolicy +/// // See esphome/components/light/color_mode.h for complete example +/// +/// Design notes: +/// - Policy-based design allows custom bit mappings without template specialization +/// - Iterator converts bit positions to actual values during traversal +/// - All operations are constexpr-compatible for compile-time initialization +/// - Drop-in replacement for std::set with simpler API +/// +template> class FiniteSetMask { + public: + using bitmask_t = typename BitPolicy::mask_t; + + constexpr FiniteSetMask() = default; + + /// Construct from initializer list: {VALUE1, VALUE2, ...} + constexpr FiniteSetMask(std::initializer_list values) { + for (auto value : values) { + this->insert(value); + } + } + + /// Add a single value to the set (std::set compatibility) + constexpr void insert(ValueType value) { this->mask_ |= (static_cast(1) << BitPolicy::to_bit(value)); } + + /// Add multiple values from initializer list + constexpr void insert(std::initializer_list values) { + for (auto value : values) { + this->insert(value); + } + } + + /// Remove a value from the set (std::set compatibility) + constexpr void erase(ValueType value) { this->mask_ &= ~(static_cast(1) << BitPolicy::to_bit(value)); } + + /// Clear all values from the set + constexpr void clear() { this->mask_ = 0; } + + /// Check if the set contains a specific value (std::set compatibility) + /// Returns 1 if present, 0 if not (same as std::set for unique elements) + constexpr size_t count(ValueType value) const { + return (this->mask_ & (static_cast(1) << BitPolicy::to_bit(value))) != 0 ? 1 : 0; + } + + /// Count the number of values in the set + constexpr size_t size() const { + // Brian Kernighan's algorithm - efficient for sparse bitmasks + // Typical case: 2-4 modes out of 10 possible + bitmask_t n = this->mask_; + size_t count = 0; + while (n) { + n &= n - 1; // Clear the least significant set bit + count++; + } + return count; + } + + /// Check if the set is empty + constexpr bool empty() const { return this->mask_ == 0; } + + /// Iterator support for range-based for loops and API encoding + /// Iterates over set bits and converts bit positions to values + /// Optimization: removes bits from mask as we iterate + class Iterator { + public: + using iterator_category = std::forward_iterator_tag; + using value_type = ValueType; + using difference_type = std::ptrdiff_t; + using pointer = const ValueType *; + using reference = ValueType; + + constexpr explicit Iterator(bitmask_t mask) : mask_(mask) {} + + constexpr ValueType operator*() const { + // Return value for the first set bit + return BitPolicy::from_bit(find_next_set_bit(mask_, 0)); + } + + constexpr Iterator &operator++() { + // Clear the lowest set bit (Brian Kernighan's algorithm) + mask_ &= mask_ - 1; + return *this; + } + + constexpr bool operator==(const Iterator &other) const { return mask_ == other.mask_; } + + constexpr bool operator!=(const Iterator &other) const { return !(*this == other); } + + private: + bitmask_t mask_; + }; + + constexpr Iterator begin() const { return Iterator(mask_); } + constexpr Iterator end() const { return Iterator(0); } + + /// Get the raw bitmask value for optimized operations + constexpr bitmask_t get_mask() const { return this->mask_; } + + /// Check if a specific value is present in a raw bitmask + /// Useful for checking intersection results without creating temporary objects + static constexpr bool 
mask_contains(bitmask_t mask, ValueType value) { + return (mask & (static_cast(1) << BitPolicy::to_bit(value))) != 0; + } + + /// Get the first value from a raw bitmask + /// Used for optimizing intersection logic (e.g., "pick first suitable mode") + static constexpr ValueType first_value_from_mask(bitmask_t mask) { + return BitPolicy::from_bit(find_next_set_bit(mask, 0)); + } + + /// Find the next set bit in a bitmask starting from a given position + /// Returns the bit position, or MAX_BITS if no more bits are set + static constexpr int find_next_set_bit(bitmask_t mask, int start_bit) { + int bit = start_bit; + while (bit < BitPolicy::MAX_BITS && !(mask & (static_cast(1) << bit))) { + ++bit; + } + return bit; + } + + protected: + bitmask_t mask_{0}; +}; + +} // namespace esphome diff --git a/esphome/core/helpers.h b/esphome/core/helpers.h index bc2ed41120..e542805a8d 100644 --- a/esphome/core/helpers.h +++ b/esphome/core/helpers.h @@ -197,12 +197,8 @@ template class FixedVector { size_ = 0; } - public: - FixedVector() = default; - - /// Constructor from initializer list - allocates exact size needed - /// This enables brace initialization: FixedVector v = {1, 2, 3}; - FixedVector(std::initializer_list init_list) { + // Helper to assign from initializer list (shared by constructor and assignment operator) + void assign_from_initializer_list_(std::initializer_list init_list) { init(init_list.size()); size_t idx = 0; for (const auto &item : init_list) { @@ -212,6 +208,13 @@ template class FixedVector { size_ = init_list.size(); } + public: + FixedVector() = default; + + /// Constructor from initializer list - allocates exact size needed + /// This enables brace initialization: FixedVector v = {1, 2, 3}; + FixedVector(std::initializer_list init_list) { assign_from_initializer_list_(init_list); } + ~FixedVector() { cleanup_(); } // Disable copy operations (avoid accidental expensive copies) @@ -237,6 +240,15 @@ template class FixedVector { return *this; } + /// Assignment from initializer list - avoids temporary and move overhead + /// This enables: FixedVector v; v = {1, 2, 3}; + FixedVector &operator=(std::initializer_list init_list) { + cleanup_(); + reset_(); + assign_from_initializer_list_(init_list); + return *this; + } + // Allocate capacity - can be called multiple times to reinit void init(size_t n) { cleanup_(); @@ -301,7 +313,6 @@ template class FixedVector { const T &back() const { return data_[size_ - 1]; } size_t size() const { return size_; } - size_t capacity() const { return capacity_; } bool empty() const { return size_ == 0; } /// Access element without bounds checking (matches std::vector behavior) @@ -1162,18 +1173,4 @@ template::value, int> = 0> T &id(T ///@} -/// @name Deprecated functions -///@{ - -ESPDEPRECATED("hexencode() is deprecated, use format_hex_pretty() instead.", "2022.1") -inline std::string hexencode(const uint8_t *data, uint32_t len) { return format_hex_pretty(data, len); } - -template -ESPDEPRECATED("hexencode() is deprecated, use format_hex_pretty() instead.", "2022.1") -std::string hexencode(const T &data) { - return hexencode(data.data(), data.size()); -} - -///@} - } // namespace esphome diff --git a/esphome/platformio_api.py b/esphome/platformio_api.py index a4b5b432fd..b7b6cf399d 100644 --- a/esphome/platformio_api.py +++ b/esphome/platformio_api.py @@ -387,22 +387,22 @@ class IDEData: @property def objdump_path(self) -> str: # replace gcc at end with objdump - - # Windows - if self.cc_path.endswith(".exe"): - return 
f"{self.cc_path[:-7]}objdump.exe" - - return f"{self.cc_path[:-3]}objdump" + path = self.cc_path + return ( + f"{path[:-7]}objdump.exe" + if path.endswith(".exe") + else f"{path[:-3]}objdump" + ) @property def readelf_path(self) -> str: # replace gcc at end with readelf - - # Windows - if self.cc_path.endswith(".exe"): - return f"{self.cc_path[:-7]}readelf.exe" - - return f"{self.cc_path[:-3]}readelf" + path = self.cc_path + return ( + f"{path[:-7]}readelf.exe" + if path.endswith(".exe") + else f"{path[:-3]}readelf" + ) def analyze_memory_usage(config: dict[str, Any]) -> None: diff --git a/platformio.ini b/platformio.ini index 6b2a8657bb..94f58f84ab 100644 --- a/platformio.ini +++ b/platformio.ini @@ -46,6 +46,10 @@ lib_deps = ; This is using the repository until a new release is published to PlatformIO https://github.com/Sensirion/arduino-gas-index-algorithm.git#3.2.1 ; Sensirion Gas Index Algorithm Arduino Library lvgl/lvgl@8.4.0 ; lvgl + ; This dependency is used only in unit tests. + ; Must coincide with PLATFORMIO_GOOGLE_TEST_LIB in scripts/cpp_unit_test.py + ; See scripts/cpp_unit_test.py and tests/components/README.md + google/googletest@^1.15.2 build_flags = -DESPHOME_LOG_LEVEL=ESPHOME_LOG_LEVEL_VERY_VERBOSE -std=gnu++20 diff --git a/requirements_test.txt b/requirements_test.txt index 4c60a31d7f..5f94329e3f 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -1,4 +1,4 @@ -pylint==4.0.1 +pylint==4.0.2 flake8==7.3.0 # also change in .pre-commit-config.yaml when updating ruff==0.14.1 # also change in .pre-commit-config.yaml when updating pyupgrade==3.21.0 # also change in .pre-commit-config.yaml when updating diff --git a/script/analyze_component_buses.py b/script/analyze_component_buses.py index d0882e22e9..78f5ca3344 100755 --- a/script/analyze_component_buses.py +++ b/script/analyze_component_buses.py @@ -34,6 +34,8 @@ from typing import Any # Add esphome to path sys.path.insert(0, str(Path(__file__).parent.parent)) +from helpers import BASE_BUS_COMPONENTS + from esphome import yaml_util from esphome.config_helpers import Extend, Remove @@ -67,18 +69,6 @@ NO_BUSES_SIGNATURE = "no_buses" # Isolated components have unique signatures and cannot be merged with others ISOLATED_SIGNATURE_PREFIX = "isolated_" -# Base bus components - these ARE the bus implementations and should not -# be flagged as needing migration since they are the platform/base components -BASE_BUS_COMPONENTS = { - "i2c", - "spi", - "uart", - "modbus", - "canbus", - "remote_transmitter", - "remote_receiver", -} - # Components that must be tested in isolation (not grouped or batched with others) # These have known build issues that prevent grouping # NOTE: This should be kept in sync with both test_build_components and split_components_for_ci.py diff --git a/script/api_protobuf/api_protobuf.py b/script/api_protobuf/api_protobuf.py index 4936434fc2..2f83b0bd79 100755 --- a/script/api_protobuf/api_protobuf.py +++ b/script/api_protobuf/api_protobuf.py @@ -1415,7 +1415,13 @@ class RepeatedTypeInfo(TypeInfo): super().__init__(field) # Check if this is a pointer field by looking for container_pointer option self._container_type = get_field_opt(field, pb.container_pointer, "") - self._use_pointer = bool(self._container_type) + # Check for non-template container pointer + self._container_no_template = get_field_opt( + field, pb.container_pointer_no_template, "" + ) + self._use_pointer = bool(self._container_type) or bool( + self._container_no_template + ) # Check if this should use FixedVector instead of 
std::vector self._use_fixed_vector = get_field_opt(field, pb.fixed_vector, False) @@ -1434,12 +1440,18 @@ class RepeatedTypeInfo(TypeInfo): @property def cpp_type(self) -> str: + if self._container_no_template: + # Non-template container: use type as-is without appending template parameters + return f"const {self._container_no_template}*" if self._use_pointer and self._container_type: # For pointer fields, use the specified container type - # If the container type already includes the element type (e.g., std::set) - # use it as-is, otherwise append the element type + # Two cases: + # 1. "std::set" - Full type with template params, use as-is + # 2. "std::set" - No <>, append the element type if "<" in self._container_type and ">" in self._container_type: + # Has template parameters specified, use as-is return f"const {self._container_type}*" + # No <> at all, append element type return f"const {self._container_type}<{self._ti.cpp_type}>*" if self._use_fixed_vector: return f"FixedVector<{self._ti.cpp_type}>" diff --git a/script/ci_add_metadata_to_json.py b/script/ci_add_metadata_to_json.py new file mode 100755 index 0000000000..687b5131c0 --- /dev/null +++ b/script/ci_add_metadata_to_json.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python3 +"""Add metadata to memory analysis JSON file. + +This script adds components and platform metadata to an existing +memory analysis JSON file. Used by CI to ensure all required fields are present +for the comment script. +""" + +from __future__ import annotations + +import argparse +import json +from pathlib import Path +import sys + + +def main() -> int: + """Main entry point.""" + parser = argparse.ArgumentParser( + description="Add metadata to memory analysis JSON file" + ) + parser.add_argument( + "--json-file", + required=True, + help="Path to JSON file to update", + ) + parser.add_argument( + "--components", + required=True, + help='JSON array of component names (e.g., \'["api", "wifi"]\')', + ) + parser.add_argument( + "--platform", + required=True, + help="Platform name", + ) + + args = parser.parse_args() + + # Load existing JSON + json_path = Path(args.json_file) + if not json_path.exists(): + print(f"Error: JSON file not found: {args.json_file}", file=sys.stderr) + return 1 + + try: + with open(json_path, encoding="utf-8") as f: + data = json.load(f) + except (json.JSONDecodeError, OSError) as e: + print(f"Error loading JSON: {e}", file=sys.stderr) + return 1 + + # Parse components + try: + components = json.loads(args.components) + if not isinstance(components, list): + print("Error: --components must be a JSON array", file=sys.stderr) + return 1 + # Element-level validation: ensure each component is a non-empty string + for idx, comp in enumerate(components): + if not isinstance(comp, str) or not comp.strip(): + print( + f"Error: component at index {idx} is not a non-empty string: {comp!r}", + file=sys.stderr, + ) + return 1 + except json.JSONDecodeError as e: + print(f"Error parsing components: {e}", file=sys.stderr) + return 1 + + # Add metadata + data["components"] = components + data["platform"] = args.platform + + # Write back + try: + with open(json_path, "w", encoding="utf-8") as f: + json.dump(data, f, indent=2) + print(f"Added metadata to {args.json_file}", file=sys.stderr) + except OSError as e: + print(f"Error writing JSON: {e}", file=sys.stderr) + return 1 + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/script/ci_helpers.py b/script/ci_helpers.py new file mode 100755 index 0000000000..48b0e4bbfe --- /dev/null 
+++ b/script/ci_helpers.py @@ -0,0 +1,23 @@ +"""Common helper functions for CI scripts.""" + +from __future__ import annotations + +import os + + +def write_github_output(outputs: dict[str, str | int]) -> None: + """Write multiple outputs to GITHUB_OUTPUT or stdout. + + When running in GitHub Actions, writes to the GITHUB_OUTPUT file. + When running locally, writes to stdout for debugging. + + Args: + outputs: Dictionary of key-value pairs to write + """ + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output: + with open(github_output, "a", encoding="utf-8") as f: + f.writelines(f"{key}={value}\n" for key, value in outputs.items()) + else: + for key, value in outputs.items(): + print(f"{key}={value}") diff --git a/script/ci_memory_impact_comment.py b/script/ci_memory_impact_comment.py new file mode 100755 index 0000000000..1331a44d03 --- /dev/null +++ b/script/ci_memory_impact_comment.py @@ -0,0 +1,643 @@ +#!/usr/bin/env python3 +"""Post or update a PR comment with memory impact analysis results. + +This script creates or updates a GitHub PR comment with memory usage changes. +It uses the GitHub CLI (gh) to manage comments and maintains a single comment +that gets updated on subsequent runs. +""" + +from __future__ import annotations + +import argparse +import json +from pathlib import Path +import subprocess +import sys + +from jinja2 import Environment, FileSystemLoader + +# Add esphome to path for analyze_memory import +sys.path.insert(0, str(Path(__file__).parent.parent)) + +# pylint: disable=wrong-import-position + +# Comment marker to identify our memory impact comments +COMMENT_MARKER = "" + + +def run_gh_command(args: list[str], operation: str) -> subprocess.CompletedProcess: + """Run a gh CLI command with error handling. + + Args: + args: Command arguments (including 'gh') + operation: Description of the operation for error messages + + Returns: + CompletedProcess result + + Raises: + subprocess.CalledProcessError: If command fails (with detailed error output) + """ + try: + return subprocess.run( + args, + check=True, + capture_output=True, + text=True, + ) + except subprocess.CalledProcessError as e: + print( + f"ERROR: {operation} failed with exit code {e.returncode}", file=sys.stderr + ) + print(f"ERROR: Command: {' '.join(args)}", file=sys.stderr) + print(f"ERROR: stdout: {e.stdout}", file=sys.stderr) + print(f"ERROR: stderr: {e.stderr}", file=sys.stderr) + raise + + +# Thresholds for emoji significance indicators (percentage) +OVERALL_CHANGE_THRESHOLD = 1.0 # Overall RAM/Flash changes +COMPONENT_CHANGE_THRESHOLD = 3.0 # Component breakdown changes + +# Display limits for tables +MAX_COMPONENT_BREAKDOWN_ROWS = 20 # Maximum components to show in breakdown table +MAX_CHANGED_SYMBOLS_ROWS = 30 # Maximum changed symbols to show +MAX_NEW_SYMBOLS_ROWS = 15 # Maximum new symbols to show +MAX_REMOVED_SYMBOLS_ROWS = 15 # Maximum removed symbols to show + +# Symbol display formatting +SYMBOL_DISPLAY_MAX_LENGTH = 100 # Max length before using
tag +SYMBOL_DISPLAY_TRUNCATE_LENGTH = 97 # Length to truncate in summary + +# Component change noise threshold +COMPONENT_CHANGE_NOISE_THRESHOLD = 2 # Ignore component changes ≤ this many bytes + +# Template directory +TEMPLATE_DIR = Path(__file__).parent / "templates" + + +def load_analysis_json(json_path: str) -> dict | None: + """Load memory analysis results from JSON file. + + Args: + json_path: Path to analysis JSON file + + Returns: + Dictionary with analysis results or None if file doesn't exist/can't be loaded + """ + json_file = Path(json_path) + if not json_file.exists(): + print(f"Analysis JSON not found: {json_path}", file=sys.stderr) + return None + + try: + with open(json_file, encoding="utf-8") as f: + return json.load(f) + except (json.JSONDecodeError, OSError) as e: + print(f"Failed to load analysis JSON: {e}", file=sys.stderr) + return None + + +def format_bytes(bytes_value: int) -> str: + """Format bytes value with comma separators. + + Args: + bytes_value: Number of bytes + + Returns: + Formatted string with comma separators (e.g., "1,234 bytes") + """ + return f"{bytes_value:,} bytes" + + +def format_change(before: int, after: int, threshold: float | None = None) -> str: + """Format memory change with delta and percentage. + + Args: + before: Memory usage before change (in bytes) + after: Memory usage after change (in bytes) + threshold: Optional percentage threshold for "significant" change. + If provided, adds supplemental emoji (🎉/🚨/🔸/✅) to chart icons. + If None, only shows chart icons (📈/📉/➡️). + + Returns: + Formatted string with delta and percentage + """ + delta = after - before + percentage = 0.0 if before == 0 else (delta / before) * 100 + + # Always use chart icons to show direction + if delta > 0: + delta_str = f"+{delta:,} bytes" + trend_icon = "📈" + # Add supplemental emoji based on threshold if provided + if threshold is not None: + significance = "🚨" if abs(percentage) > threshold else "🔸" + emoji = f"{trend_icon} {significance}" + else: + emoji = trend_icon + elif delta < 0: + delta_str = f"{delta:,} bytes" + trend_icon = "📉" + # Add supplemental emoji based on threshold if provided + if threshold is not None: + significance = "🎉" if abs(percentage) > threshold else "✅" + emoji = f"{trend_icon} {significance}" + else: + emoji = trend_icon + else: + delta_str = "+0 bytes" + emoji = "➡️" + + # Format percentage with sign + if percentage > 0: + pct_str = f"+{percentage:.2f}%" + elif percentage < 0: + pct_str = f"{percentage:.2f}%" + else: + pct_str = "0.00%" + + return f"{emoji} {delta_str} ({pct_str})" + + +def prepare_symbol_changes_data( + target_symbols: dict | None, pr_symbols: dict | None +) -> dict | None: + """Prepare symbol changes data for template rendering. 
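    Illustrative example (made-up symbol names and sizes, not from a real build):
    given target symbols {"app_task": 100} and PR symbols {"app_task": 120,
    "new_helper": 64}, the function returns roughly

        {
            "changed_symbols": [("app_task", 100, 120, 20)],
            "new_symbols": [("new_helper", 64)],
            "removed_symbols": [],
        }

    with changed symbols sorted by absolute delta and new/removed symbols by size.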
+ + Args: + target_symbols: Symbol name to size mapping for target branch + pr_symbols: Symbol name to size mapping for PR branch + + Returns: + Dictionary with changed, new, and removed symbols, or None if no changes + """ + if not target_symbols or not pr_symbols: + return None + + # Find all symbols that exist in both branches or only in one + all_symbols = set(target_symbols.keys()) | set(pr_symbols.keys()) + + # Track changes + changed_symbols: list[ + tuple[str, int, int, int] + ] = [] # (symbol, target_size, pr_size, delta) + new_symbols: list[tuple[str, int]] = [] # (symbol, size) + removed_symbols: list[tuple[str, int]] = [] # (symbol, size) + + for symbol in all_symbols: + target_size = target_symbols.get(symbol, 0) + pr_size = pr_symbols.get(symbol, 0) + + if target_size == 0 and pr_size > 0: + # New symbol + new_symbols.append((symbol, pr_size)) + elif target_size > 0 and pr_size == 0: + # Removed symbol + removed_symbols.append((symbol, target_size)) + elif target_size != pr_size: + # Changed symbol + delta = pr_size - target_size + changed_symbols.append((symbol, target_size, pr_size, delta)) + + if not changed_symbols and not new_symbols and not removed_symbols: + return None + + # Sort by size/delta + changed_symbols.sort(key=lambda x: abs(x[3]), reverse=True) + new_symbols.sort(key=lambda x: x[1], reverse=True) + removed_symbols.sort(key=lambda x: x[1], reverse=True) + + return { + "changed_symbols": changed_symbols, + "new_symbols": new_symbols, + "removed_symbols": removed_symbols, + } + + +def prepare_component_breakdown_data( + target_analysis: dict | None, pr_analysis: dict | None +) -> list[tuple[str, int, int, int]] | None: + """Prepare component breakdown data for template rendering. + + Args: + target_analysis: Component memory breakdown for target branch + pr_analysis: Component memory breakdown for PR branch + + Returns: + List of tuples (component, target_flash, pr_flash, delta), or None if no changes + """ + if not target_analysis or not pr_analysis: + return None + + # Combine all components from both analyses + all_components = set(target_analysis.keys()) | set(pr_analysis.keys()) + + # Filter to components that have changed (ignoring noise) + changed_components: list[ + tuple[str, int, int, int] + ] = [] # (comp, target_flash, pr_flash, delta) + for comp in all_components: + target_mem = target_analysis.get(comp, {}) + pr_mem = pr_analysis.get(comp, {}) + + target_flash = target_mem.get("flash_total", 0) + pr_flash = pr_mem.get("flash_total", 0) + + # Only include if component has meaningful change (above noise threshold) + delta = pr_flash - target_flash + if abs(delta) > COMPONENT_CHANGE_NOISE_THRESHOLD: + changed_components.append((comp, target_flash, pr_flash, delta)) + + if not changed_components: + return None + + # Sort by absolute delta (largest changes first) + changed_components.sort(key=lambda x: abs(x[3]), reverse=True) + + return changed_components + + +def create_comment_body( + components: list[str], + platform: str, + target_ram: int, + target_flash: int, + pr_ram: int, + pr_flash: int, + target_analysis: dict | None = None, + pr_analysis: dict | None = None, + target_symbols: dict | None = None, + pr_symbols: dict | None = None, +) -> str: + """Create the comment body with memory impact analysis using Jinja2 templates. 
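    Illustrative call (sketch only; component names and byte counts are invented):

        body = create_comment_body(
            components=["api", "wifi"],
            platform="esp32-idf",
            target_ram=41_000,
            target_flash=512_000,
            pr_ram=41_128,
            pr_flash=513_024,
        )

    The template context includes COMMENT_MARKER, so the rendered comment can be
    located and updated by find_existing_comment() on later runs.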
+ + Args: + components: List of component names (merged config) + platform: Platform name + target_ram: RAM usage in target branch + target_flash: Flash usage in target branch + pr_ram: RAM usage in PR branch + pr_flash: Flash usage in PR branch + target_analysis: Optional component breakdown for target branch + pr_analysis: Optional component breakdown for PR branch + target_symbols: Optional symbol map for target branch + pr_symbols: Optional symbol map for PR branch + + Returns: + Formatted comment body + """ + # Set up Jinja2 environment + env = Environment( + loader=FileSystemLoader(TEMPLATE_DIR), + trim_blocks=True, + lstrip_blocks=True, + ) + + # Register custom filters + env.filters["format_bytes"] = format_bytes + env.filters["format_change"] = format_change + + # Prepare template context + context = { + "comment_marker": COMMENT_MARKER, + "platform": platform, + "target_ram": format_bytes(target_ram), + "pr_ram": format_bytes(pr_ram), + "target_flash": format_bytes(target_flash), + "pr_flash": format_bytes(pr_flash), + "ram_change": format_change( + target_ram, pr_ram, threshold=OVERALL_CHANGE_THRESHOLD + ), + "flash_change": format_change( + target_flash, pr_flash, threshold=OVERALL_CHANGE_THRESHOLD + ), + "component_change_threshold": COMPONENT_CHANGE_THRESHOLD, + } + + # Format components list + if len(components) == 1: + context["components_str"] = f"`{components[0]}`" + context["config_note"] = "a representative test configuration" + else: + context["components_str"] = ", ".join(f"`{c}`" for c in sorted(components)) + context["config_note"] = ( + f"a merged configuration with {len(components)} components" + ) + + # Prepare component breakdown if available + component_breakdown = "" + if target_analysis and pr_analysis: + changed_components = prepare_component_breakdown_data( + target_analysis, pr_analysis + ) + if changed_components: + template = env.get_template("ci_memory_impact_component_breakdown.j2") + component_breakdown = template.render( + changed_components=changed_components, + format_bytes=format_bytes, + format_change=format_change, + component_change_threshold=COMPONENT_CHANGE_THRESHOLD, + max_rows=MAX_COMPONENT_BREAKDOWN_ROWS, + ) + + # Prepare symbol changes if available + symbol_changes = "" + if target_symbols and pr_symbols: + symbol_data = prepare_symbol_changes_data(target_symbols, pr_symbols) + if symbol_data: + template = env.get_template("ci_memory_impact_symbol_changes.j2") + symbol_changes = template.render( + **symbol_data, + format_bytes=format_bytes, + format_change=format_change, + max_changed_rows=MAX_CHANGED_SYMBOLS_ROWS, + max_new_rows=MAX_NEW_SYMBOLS_ROWS, + max_removed_rows=MAX_REMOVED_SYMBOLS_ROWS, + symbol_max_length=SYMBOL_DISPLAY_MAX_LENGTH, + symbol_truncate_length=SYMBOL_DISPLAY_TRUNCATE_LENGTH, + ) + + if not target_analysis or not pr_analysis: + print("No ELF files provided, skipping detailed analysis", file=sys.stderr) + + context["component_breakdown"] = component_breakdown + context["symbol_changes"] = symbol_changes + + # Render main template + template = env.get_template("ci_memory_impact_comment_template.j2") + return template.render(**context) + + +def find_existing_comment(pr_number: str) -> str | None: + """Find existing memory impact comment on the PR. 
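    Illustrative output of the --jq filter used below (id and text invented): one
    JSON object per line, e.g.

        {"id": 987654321, "body": "...memory impact tables..."}

    The first body containing COMMENT_MARKER wins and its numeric id is returned as
    a string; if no comment matches, None is returned.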
+ + Args: + pr_number: PR number + + Returns: + Comment numeric ID if found, None otherwise + + Raises: + subprocess.CalledProcessError: If gh command fails + """ + print(f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr) + + # Use gh api to get comments directly - this returns the numeric id field + result = run_gh_command( + [ + "gh", + "api", + f"/repos/{{owner}}/{{repo}}/issues/{pr_number}/comments", + "--jq", + ".[] | {id, body}", + ], + operation="Get PR comments", + ) + + print( + f"DEBUG: gh api comments output (first 500 chars):\n{result.stdout[:500]}", + file=sys.stderr, + ) + + # Parse comments and look for our marker + comment_count = 0 + for line in result.stdout.strip().split("\n"): + if not line: + continue + + try: + comment = json.loads(line) + comment_count += 1 + comment_id = comment.get("id") + print( + f"DEBUG: Checking comment {comment_count}: id={comment_id}", + file=sys.stderr, + ) + + body = comment.get("body", "") + if COMMENT_MARKER in body: + print( + f"DEBUG: Found existing comment with id={comment_id}", + file=sys.stderr, + ) + # Return the numeric id + return str(comment_id) + print("DEBUG: Comment does not contain marker", file=sys.stderr) + except json.JSONDecodeError as e: + print(f"DEBUG: JSON decode error: {e}", file=sys.stderr) + continue + + print( + f"DEBUG: No existing comment found (checked {comment_count} comments)", + file=sys.stderr, + ) + return None + + +def update_existing_comment(comment_id: str, comment_body: str) -> None: + """Update an existing comment. + + Args: + comment_id: Comment ID to update + comment_body: New comment body text + + Raises: + subprocess.CalledProcessError: If gh command fails + """ + print(f"DEBUG: Updating existing comment {comment_id}", file=sys.stderr) + print(f"DEBUG: Comment body length: {len(comment_body)} bytes", file=sys.stderr) + result = run_gh_command( + [ + "gh", + "api", + f"/repos/{{owner}}/{{repo}}/issues/comments/{comment_id}", + "-X", + "PATCH", + "-f", + f"body={comment_body}", + ], + operation="Update PR comment", + ) + print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr) + + +def create_new_comment(pr_number: str, comment_body: str) -> None: + """Create a new PR comment. + + Args: + pr_number: PR number + comment_body: Comment body text + + Raises: + subprocess.CalledProcessError: If gh command fails + """ + print(f"DEBUG: Posting new comment on PR #{pr_number}", file=sys.stderr) + print(f"DEBUG: Comment body length: {len(comment_body)} bytes", file=sys.stderr) + result = run_gh_command( + ["gh", "pr", "comment", pr_number, "--body", comment_body], + operation="Create PR comment", + ) + print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr) + + +def post_or_update_comment(pr_number: str, comment_body: str) -> None: + """Post a new comment or update existing one. 
+ + Args: + pr_number: PR number + comment_body: Comment body text + + Raises: + subprocess.CalledProcessError: If gh command fails + """ + # Look for existing comment + existing_comment_id = find_existing_comment(pr_number) + + if existing_comment_id and existing_comment_id != "None": + update_existing_comment(existing_comment_id, comment_body) + else: + create_new_comment(pr_number, comment_body) + + print("Comment posted/updated successfully", file=sys.stderr) + + +def main() -> int: + """Main entry point.""" + parser = argparse.ArgumentParser( + description="Post or update PR comment with memory impact analysis" + ) + parser.add_argument("--pr-number", required=True, help="PR number") + parser.add_argument( + "--target-json", + required=True, + help="Path to target branch analysis JSON file", + ) + parser.add_argument( + "--pr-json", + required=True, + help="Path to PR branch analysis JSON file", + ) + + args = parser.parse_args() + + # Load analysis JSON files (all data comes from JSON for security) + target_data: dict | None = load_analysis_json(args.target_json) + if not target_data: + print("Error: Failed to load target analysis JSON", file=sys.stderr) + sys.exit(1) + + pr_data: dict | None = load_analysis_json(args.pr_json) + if not pr_data: + print("Error: Failed to load PR analysis JSON", file=sys.stderr) + sys.exit(1) + + # Extract detailed analysis if available + target_analysis: dict | None = None + pr_analysis: dict | None = None + target_symbols: dict | None = None + pr_symbols: dict | None = None + + if target_data.get("detailed_analysis"): + target_analysis = target_data["detailed_analysis"].get("components") + target_symbols = target_data["detailed_analysis"].get("symbols") + + if pr_data.get("detailed_analysis"): + pr_analysis = pr_data["detailed_analysis"].get("components") + pr_symbols = pr_data["detailed_analysis"].get("symbols") + + # Extract all values from JSON files (prevents shell injection from PR code) + components = target_data.get("components") + platform = target_data.get("platform") + target_ram = target_data.get("ram_bytes") + target_flash = target_data.get("flash_bytes") + pr_ram = pr_data.get("ram_bytes") + pr_flash = pr_data.get("flash_bytes") + + # Validate required fields and types + missing_fields: list[str] = [] + type_errors: list[str] = [] + + if components is None: + missing_fields.append("components") + elif not isinstance(components, list): + type_errors.append( + f"components must be a list, got {type(components).__name__}" + ) + else: + for idx, comp in enumerate(components): + if not isinstance(comp, str): + type_errors.append( + f"components[{idx}] must be a string, got {type(comp).__name__}" + ) + if platform is None: + missing_fields.append("platform") + elif not isinstance(platform, str): + type_errors.append(f"platform must be a string, got {type(platform).__name__}") + + if target_ram is None: + missing_fields.append("target.ram_bytes") + elif not isinstance(target_ram, int): + type_errors.append( + f"target.ram_bytes must be an integer, got {type(target_ram).__name__}" + ) + + if target_flash is None: + missing_fields.append("target.flash_bytes") + elif not isinstance(target_flash, int): + type_errors.append( + f"target.flash_bytes must be an integer, got {type(target_flash).__name__}" + ) + + if pr_ram is None: + missing_fields.append("pr.ram_bytes") + elif not isinstance(pr_ram, int): + type_errors.append( + f"pr.ram_bytes must be an integer, got {type(pr_ram).__name__}" + ) + + if pr_flash is None: + 
missing_fields.append("pr.flash_bytes") + elif not isinstance(pr_flash, int): + type_errors.append( + f"pr.flash_bytes must be an integer, got {type(pr_flash).__name__}" + ) + + if missing_fields or type_errors: + if missing_fields: + print( + f"Error: JSON files missing required fields: {', '.join(missing_fields)}", + file=sys.stderr, + ) + if type_errors: + print( + f"Error: Type validation failed: {'; '.join(type_errors)}", + file=sys.stderr, + ) + print(f"Target JSON keys: {list(target_data.keys())}", file=sys.stderr) + print(f"PR JSON keys: {list(pr_data.keys())}", file=sys.stderr) + sys.exit(1) + + # Create comment body + # Note: Memory totals (RAM/Flash) are summed across all builds if multiple were run. + comment_body = create_comment_body( + components=components, + platform=platform, + target_ram=target_ram, + target_flash=target_flash, + pr_ram=pr_ram, + pr_flash=pr_flash, + target_analysis=target_analysis, + pr_analysis=pr_analysis, + target_symbols=target_symbols, + pr_symbols=pr_symbols, + ) + + # Post or update comment + post_or_update_comment(args.pr_number, comment_body) + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/script/ci_memory_impact_extract.py b/script/ci_memory_impact_extract.py new file mode 100755 index 0000000000..77d59417e3 --- /dev/null +++ b/script/ci_memory_impact_extract.py @@ -0,0 +1,281 @@ +#!/usr/bin/env python3 +"""Extract memory usage statistics from ESPHome build output. + +This script parses the PlatformIO build output to extract RAM and flash +usage statistics for a compiled component. It's used by the CI workflow to +compare memory usage between branches. + +The script reads compile output from stdin and looks for the standard +PlatformIO output format: + RAM: [==== ] 36.1% (used 29548 bytes from 81920 bytes) + Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes) + +Optionally performs detailed memory analysis if a build directory is provided. +""" + +from __future__ import annotations + +import argparse +import json +from pathlib import Path +import re +import sys + +# Add esphome to path +sys.path.insert(0, str(Path(__file__).parent.parent)) + +# pylint: disable=wrong-import-position +from esphome.analyze_memory import MemoryAnalyzer +from esphome.platformio_api import IDEData +from script.ci_helpers import write_github_output + +# Regex patterns for extracting memory usage from PlatformIO output +_RAM_PATTERN = re.compile(r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes") +_FLASH_PATTERN = re.compile(r"Flash:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes") +_BUILD_PATH_PATTERN = re.compile(r"Build path: (.+)") + + +def extract_from_compile_output( + output_text: str, +) -> tuple[int | None, int | None, str | None]: + """Extract memory usage and build directory from PlatformIO compile output. + + Supports multiple builds (for component groups or isolated components). + When test_build_components.py creates multiple builds, this sums the + memory usage across all builds. + + Looks for lines like: + RAM: [==== ] 36.1% (used 29548 bytes from 81920 bytes) + Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes) + + Also extracts build directory from lines like: + INFO Compiling app... 
Build path: /path/to/build + + Args: + output_text: Compile output text (may contain multiple builds) + + Returns: + Tuple of (total_ram_bytes, total_flash_bytes, build_dir) or (None, None, None) if not found + """ + # Find all RAM and Flash matches (may be multiple builds) + ram_matches = _RAM_PATTERN.findall(output_text) + flash_matches = _FLASH_PATTERN.findall(output_text) + + if not ram_matches or not flash_matches: + return None, None, None + + # Sum all builds (handles multiple component groups) + total_ram = sum(int(match) for match in ram_matches) + total_flash = sum(int(match) for match in flash_matches) + + # Extract build directory from ESPHome's explicit build path output + # Look for: INFO Compiling app... Build path: /path/to/build + # Note: Multiple builds reuse the same build path (each overwrites the previous) + build_dir = None + if match := _BUILD_PATH_PATTERN.search(output_text): + build_dir = match.group(1).strip() + + return total_ram, total_flash, build_dir + + +def run_detailed_analysis(build_dir: str) -> dict | None: + """Run detailed memory analysis on build directory. + + Args: + build_dir: Path to ESPHome build directory + + Returns: + Dictionary with analysis results or None if analysis fails + """ + build_path = Path(build_dir) + if not build_path.exists(): + print(f"Build directory not found: {build_dir}", file=sys.stderr) + return None + + # Find firmware.elf + elf_path = None + for elf_candidate in [ + build_path / "firmware.elf", + build_path / ".pioenvs" / build_path.name / "firmware.elf", + ]: + if elf_candidate.exists(): + elf_path = str(elf_candidate) + break + + if not elf_path: + print(f"firmware.elf not found in {build_dir}", file=sys.stderr) + return None + + # Find idedata.json - check multiple locations + device_name = build_path.name + idedata_candidates = [ + # In .pioenvs for test builds + build_path / ".pioenvs" / device_name / "idedata.json", + # In .esphome/idedata for regular builds + Path.home() / ".esphome" / "idedata" / f"{device_name}.json", + # Check parent directories for .esphome/idedata (for test_build_components) + build_path.parent.parent.parent / "idedata" / f"{device_name}.json", + ] + + idedata = None + for idedata_path in idedata_candidates: + if not idedata_path.exists(): + continue + try: + with open(idedata_path, encoding="utf-8") as f: + raw_data = json.load(f) + idedata = IDEData(raw_data) + print(f"Loaded idedata from: {idedata_path}", file=sys.stderr) + break + except (json.JSONDecodeError, OSError) as e: + print( + f"Warning: Failed to load idedata from {idedata_path}: {e}", + file=sys.stderr, + ) + + analyzer = MemoryAnalyzer(elf_path, idedata=idedata) + components = analyzer.analyze() + + # Convert to JSON-serializable format + result = { + "components": { + name: { + "text": mem.text_size, + "rodata": mem.rodata_size, + "data": mem.data_size, + "bss": mem.bss_size, + "flash_total": mem.flash_total, + "ram_total": mem.ram_total, + "symbol_count": mem.symbol_count, + } + for name, mem in components.items() + }, + "symbols": {}, + } + + # Build symbol map + for section in analyzer.sections.values(): + for symbol_name, size, _ in section.symbols: + if size > 0: + demangled = analyzer._demangle_symbol(symbol_name) + result["symbols"][demangled] = size + + return result + + +def main() -> int: + """Main entry point.""" + parser = argparse.ArgumentParser( + description="Extract memory usage from ESPHome build output" + ) + parser.add_argument( + "--output-env", + action="store_true", + help="Output to GITHUB_OUTPUT 
environment file", + ) + parser.add_argument( + "--build-dir", + help="Optional build directory for detailed memory analysis (overrides auto-detection)", + ) + parser.add_argument( + "--output-json", + help="Optional path to save detailed analysis JSON", + ) + parser.add_argument( + "--output-build-dir", + help="Optional path to write the detected build directory", + ) + + args = parser.parse_args() + + # Read compile output from stdin + compile_output = sys.stdin.read() + + # Extract memory usage and build directory + ram_bytes, flash_bytes, detected_build_dir = extract_from_compile_output( + compile_output + ) + + if ram_bytes is None or flash_bytes is None: + print("Failed to extract memory usage from compile output", file=sys.stderr) + print("Expected lines like:", file=sys.stderr) + print( + " RAM: [==== ] 36.1% (used 29548 bytes from 81920 bytes)", + file=sys.stderr, + ) + print( + " Flash: [=== ] 34.0% (used 348511 bytes from 1023984 bytes)", + file=sys.stderr, + ) + return 1 + + # Count how many builds were found + num_builds = len(_RAM_PATTERN.findall(compile_output)) + + if num_builds > 1: + print( + f"Found {num_builds} builds - summing memory usage across all builds", + file=sys.stderr, + ) + print( + "WARNING: Detailed analysis will only cover the last build", + file=sys.stderr, + ) + + print(f"Total RAM: {ram_bytes} bytes", file=sys.stderr) + print(f"Total Flash: {flash_bytes} bytes", file=sys.stderr) + + # Determine which build directory to use (explicit arg overrides auto-detection) + build_dir = args.build_dir or detected_build_dir + + if detected_build_dir: + print(f"Detected build directory: {detected_build_dir}", file=sys.stderr) + if num_builds > 1: + print( + f" (using last of {num_builds} builds for detailed analysis)", + file=sys.stderr, + ) + + # Write build directory to file if requested + if args.output_build_dir and build_dir: + build_dir_path = Path(args.output_build_dir) + build_dir_path.parent.mkdir(parents=True, exist_ok=True) + build_dir_path.write_text(build_dir) + print(f"Wrote build directory to {args.output_build_dir}", file=sys.stderr) + + # Run detailed analysis if build directory available + detailed_analysis = None + if build_dir: + print(f"Running detailed analysis on {build_dir}", file=sys.stderr) + detailed_analysis = run_detailed_analysis(build_dir) + + # Save JSON output if requested + if args.output_json: + output_data = { + "ram_bytes": ram_bytes, + "flash_bytes": flash_bytes, + "detailed_analysis": detailed_analysis, + } + + output_path = Path(args.output_json) + output_path.parent.mkdir(parents=True, exist_ok=True) + with open(output_path, "w", encoding="utf-8") as f: + json.dump(output_data, f, indent=2) + print(f"Saved analysis to {args.output_json}", file=sys.stderr) + + if args.output_env: + # Output to GitHub Actions + write_github_output( + { + "ram_usage": ram_bytes, + "flash_usage": flash_bytes, + } + ) + else: + print(f"{ram_bytes},{flash_bytes}") + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/script/cpp_unit_test.py b/script/cpp_unit_test.py new file mode 100755 index 0000000000..e97b5bd7b0 --- /dev/null +++ b/script/cpp_unit_test.py @@ -0,0 +1,172 @@ +#!/usr/bin/env python3 +import argparse +import hashlib +import os +from pathlib import Path +import subprocess +import sys + +from helpers import get_all_components, get_all_dependencies, root_path + +from esphome.__main__ import command_compile, parse_args +from esphome.config import validate_config +from esphome.core import CORE +from 
esphome.platformio_api import get_idedata + +# This must coincide with the version in /platformio.ini +PLATFORMIO_GOOGLE_TEST_LIB = "google/googletest@^1.15.2" + +# Path to /tests/components +COMPONENTS_TESTS_DIR: Path = Path(root_path) / "tests" / "components" + + +def hash_components(components: list[str]) -> str: + key = ",".join(components) + return hashlib.sha256(key.encode()).hexdigest()[:16] + + +def filter_components_without_tests(components: list[str]) -> list[str]: + """Filter out components that do not have a corresponding test file. + + This is done by checking if the component's directory contains at + least a .cpp file. + """ + filtered_components: list[str] = [] + for component in components: + test_dir = COMPONENTS_TESTS_DIR / component + if test_dir.is_dir() and any(test_dir.glob("*.cpp")): + filtered_components.append(component) + else: + print( + f"WARNING: No tests found for component '{component}', skipping.", + file=sys.stderr, + ) + return filtered_components + + +def create_test_config(config_name: str, includes: list[str]) -> dict: + """Create ESPHome test configuration for C++ unit tests. + + Args: + config_name: Unique name for this test configuration + includes: List of include folders for the test build + + Returns: + Configuration dict for ESPHome + """ + return { + "esphome": { + "name": config_name, + "friendly_name": "CPP Unit Tests", + "libraries": PLATFORMIO_GOOGLE_TEST_LIB, + "platformio_options": { + "build_type": "debug", + "build_unflags": [ + "-Os", # remove size-opt flag + ], + "build_flags": [ + "-Og", # optimize for debug + ], + "debug_build_flags": [ # only for debug builds + "-g3", # max debug info + "-ggdb3", + ], + }, + "includes": includes, + }, + "host": {}, + "logger": {"level": "DEBUG"}, + } + + +def run_tests(selected_components: list[str]) -> int: + # Skip tests on Windows + if os.name == "nt": + print("Skipping esphome tests on Windows", file=sys.stderr) + return 1 + + # Remove components that do not have tests + components = filter_components_without_tests(selected_components) + + if len(components) == 0: + print( + "No components specified or no tests found for the specified components.", + file=sys.stderr, + ) + return 0 + + components = sorted(components) + + # Obtain possible dependencies for the requested components: + components_with_dependencies = sorted(get_all_dependencies(set(components))) + + # Build a list of include folders, one folder per component containing tests. + # A special replacement main.cpp is located in /tests/components/main.cpp + includes: list[str] = ["main.cpp"] + components + + # Create a unique name for this config based on the actual components being tested + # to maximize cache during testing + config_name: str = "cpptests-" + hash_components(components) + + config = create_test_config(config_name, includes) + + CORE.config_path = COMPONENTS_TESTS_DIR / "dummy.yaml" + CORE.dashboard = None + + # Validate config will expand the above with defaults: + config = validate_config(config, {}) + + # Add all components and dependencies to the base configuration after validation, so their files + # are added to the build. 
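+    # Illustrative example (hypothetical component names): testing ["sensor_x"]
+    # where sensor_x depends on "i2c" adds both config["sensor_x"] = {} and
+    # config["i2c"] = {}, so the sources of both components are compiled into
+    # the test binary.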
+ config.update({key: {} for key in components_with_dependencies}) + + print(f"Testing components: {', '.join(components)}") + CORE.config = config + args = parse_args(["program", "compile", str(CORE.config_path)]) + try: + exit_code: int = command_compile(args, config) + + if exit_code != 0: + print(f"Error compiling unit tests for {', '.join(components)}") + return exit_code + except Exception as e: + print( + f"Error compiling unit tests for {', '.join(components)}. Check path. : {e}" + ) + return 2 + + # After a successful compilation, locate the executable and run it: + idedata = get_idedata(config) + if idedata is None: + print("Cannot find executable") + return 1 + + program_path: str = idedata.raw["prog_path"] + run_cmd: list[str] = [program_path] + run_proc = subprocess.run(run_cmd, check=False) + return run_proc.returncode + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Run C++ unit tests for ESPHome components." + ) + parser.add_argument( + "components", + nargs="*", + help="List of components to test. Use --all to test all known components.", + ) + parser.add_argument("--all", action="store_true", help="Test all known components.") + + args = parser.parse_args() + + if args.all: + components: list[str] = get_all_components() + else: + components: list[str] = args.components + + sys.exit(run_tests(components)) + + +if __name__ == "__main__": + main() diff --git a/script/determine-jobs.py b/script/determine-jobs.py index b000ecee3b..ac384d74f1 100755 --- a/script/determine-jobs.py +++ b/script/determine-jobs.py @@ -10,7 +10,13 @@ what files have changed. It outputs JSON with the following structure: "clang_format": true/false, "python_linters": true/false, "changed_components": ["component1", "component2", ...], - "component_test_count": 5 + "component_test_count": 5, + "memory_impact": { + "should_run": "true/false", + "components": ["component1", "component2", ...], + "platform": "esp32-idf", + "use_merged_config": "true" + } } The CI workflow uses this information to: @@ -20,6 +26,7 @@ The CI workflow uses this information to: - Skip or run Python linters (ruff, flake8, pylint, pyupgrade) - Determine which components to test individually - Decide how to split component tests (if there are many) +- Run memory impact analysis whenever there are changed components (merged config), and also for core-only changes Usage: python script/determine-jobs.py [-b BRANCH] @@ -31,24 +38,73 @@ Options: from __future__ import annotations import argparse +from collections import Counter +from enum import StrEnum from functools import cache import json import os -from pathlib import Path import subprocess import sys from typing import Any from helpers import ( + BASE_BUS_COMPONENTS, CPP_FILE_EXTENSIONS, - ESPHOME_COMPONENTS_PATH, PYTHON_FILE_EXTENSIONS, changed_files, + core_changed, + filter_component_and_test_cpp_files, + filter_component_and_test_files, get_all_dependencies, + get_changed_components, + get_component_from_path, + get_component_test_files, get_components_from_integration_fixtures, + get_components_with_dependencies, + get_cpp_changed_components, + git_ls_files, + parse_test_filename, root_path, ) +# Threshold for splitting clang-tidy jobs +# For small PRs (< 65 files), use nosplit for faster CI +# For large PRs (>= 65 files), use split for better parallelization +CLANG_TIDY_SPLIT_THRESHOLD = 65 + + +class Platform(StrEnum): + """Platform identifiers for memory impact analysis.""" + + ESP8266_ARD = "esp8266-ard" + ESP32_IDF = "esp32-idf" + 
ESP32_C3_IDF = "esp32-c3-idf" + ESP32_C6_IDF = "esp32-c6-idf" + ESP32_S2_IDF = "esp32-s2-idf" + ESP32_S3_IDF = "esp32-s3-idf" + + +# Memory impact analysis constants +MEMORY_IMPACT_FALLBACK_COMPONENT = "api" # Representative component for core changes +MEMORY_IMPACT_FALLBACK_PLATFORM = Platform.ESP32_IDF # Most representative platform + +# Platform preference order for memory impact analysis +# This order is used when no platform-specific hints are detected from filenames +# Priority rationale: +# 1. ESP32-C6 IDF - Newest platform, supports Thread/Zigbee +# 2. ESP8266 Arduino - Most memory constrained (best for detecting memory impact), +# fastest build times, most sensitive to code size changes +# 3. ESP32 IDF - Primary ESP32 platform, most representative of modern ESPHome +# 4-6. Other ESP32 variants - Less commonly used but still supported +MEMORY_IMPACT_PLATFORM_PREFERENCE = [ + Platform.ESP32_C6_IDF, # ESP32-C6 IDF (newest, supports Thread/Zigbee) + Platform.ESP8266_ARD, # ESP8266 Arduino (most memory constrained, fastest builds) + Platform.ESP32_IDF, # ESP32 IDF platform (primary ESP32 platform, most representative) + Platform.ESP32_C3_IDF, # ESP32-C3 IDF + Platform.ESP32_S2_IDF, # ESP32-S2 IDF + Platform.ESP32_S3_IDF, # ESP32-S3 IDF +] + def should_run_integration_tests(branch: str | None = None) -> bool: """Determine if integration tests should run based on changed files. @@ -90,10 +146,9 @@ def should_run_integration_tests(branch: str | None = None) -> bool: """ files = changed_files(branch) - # Check if any core files changed (esphome/core/*) - for file in files: - if file.startswith("esphome/core/"): - return True + if core_changed(files): + # If any core files changed, run integration tests + return True # Check if any integration test files changed if any("tests/integration" in file for file in files): @@ -105,16 +160,33 @@ def should_run_integration_tests(branch: str | None = None) -> bool: # Check if any required components changed for file in files: - if file.startswith(ESPHOME_COMPONENTS_PATH): - parts = file.split("/") - if len(parts) >= 3: - component = parts[2] - if component in all_required_components: - return True + component = get_component_from_path(file) + if component and component in all_required_components: + return True return False +@cache +def _is_clang_tidy_full_scan() -> bool: + """Check if clang-tidy configuration changed (requires full scan). + + Returns: + True if full scan is needed (hash changed), False otherwise. + """ + try: + result = subprocess.run( + [os.path.join(root_path, "script", "clang_tidy_hash.py"), "--check"], + capture_output=True, + check=False, + ) + # Exit 0 means hash changed (full scan needed) + return result.returncode == 0 + except Exception: + # If hash check fails, run full scan to be safe + return True + + def should_run_clang_tidy(branch: str | None = None) -> bool: """Determine if clang-tidy should run based on changed files. @@ -151,17 +223,7 @@ def should_run_clang_tidy(branch: str | None = None) -> bool: True if clang-tidy should run, False otherwise. 
""" # First check if clang-tidy configuration changed (full scan needed) - try: - result = subprocess.run( - [os.path.join(root_path, "script", "clang_tidy_hash.py"), "--check"], - capture_output=True, - check=False, - ) - # Exit 0 means hash changed (full scan needed) - if result.returncode == 0: - return True - except Exception: - # If hash check fails, run clang-tidy to be safe + if _is_clang_tidy_full_scan(): return True # Check if .clang-tidy.hash file itself was changed @@ -173,6 +235,22 @@ def should_run_clang_tidy(branch: str | None = None) -> bool: return _any_changed_file_endswith(branch, CPP_FILE_EXTENSIONS) +def count_changed_cpp_files(branch: str | None = None) -> int: + """Count the number of changed C++ files. + + This is used to determine whether to split clang-tidy jobs or run them as a single job. + For PRs with < 65 changed C++ files, running a single job is faster than splitting. + + Args: + branch: Branch to compare against. If None, uses default. + + Returns: + Number of changed C++ files. + """ + files = changed_files(branch) + return sum(1 for file in files if file.endswith(CPP_FILE_EXTENSIONS)) + + def should_run_clang_format(branch: str | None = None) -> bool: """Determine if clang-format should run based on changed files. @@ -207,6 +285,40 @@ def should_run_python_linters(branch: str | None = None) -> bool: return _any_changed_file_endswith(branch, PYTHON_FILE_EXTENSIONS) +def determine_cpp_unit_tests( + branch: str | None = None, +) -> tuple[bool, list[str]]: + """Determine if C++ unit tests should run based on changed files. + + This function is used by the CI workflow to skip C++ unit tests when + no relevant files have changed, saving CI time and resources. + + C++ unit tests will run when any of the following conditions are met: + + 1. Any C++ core source files changed (esphome/core/*), in which case + all cpp unit tests run. + 2. A test file for a component changed, which triggers tests for that + component. + 3. The code for a component changed, which triggers tests for that + component and all components that depend on it. + + Args: + branch: Branch to compare against. If None, uses default. + + Returns: + Tuple of (run_all, components) where: + - run_all: True if all tests should run, False otherwise + - components: List of specific components to test (empty if run_all) + """ + files = changed_files(branch) + if core_changed(files): + return (True, []) + + # Filter to only C++ files + cpp_files = list(filter(filter_component_and_test_cpp_files, files)) + return (False, get_cpp_changed_components(cpp_files)) + + def _any_changed_file_endswith(branch: str | None, extensions: tuple[str, ...]) -> bool: """Check if a changed file ends with any of the specified extensions.""" return any(file.endswith(extensions) for file in changed_files(branch)) @@ -224,10 +336,250 @@ def _component_has_tests(component: str) -> bool: Returns: True if the component has test YAML files """ - tests_dir = Path(root_path) / "tests" / "components" / component - if not tests_dir.exists(): - return False - return any(tests_dir.glob("test.*.yaml")) + return bool(get_component_test_files(component, all_variants=True)) + + +def _select_platform_by_preference( + platforms: list[Platform] | set[Platform], +) -> Platform: + """Select the most preferred platform from a list/set based on MEMORY_IMPACT_PLATFORM_PREFERENCE. 
+ + Args: + platforms: List or set of platforms to choose from + + Returns: + The most preferred platform (earliest in MEMORY_IMPACT_PLATFORM_PREFERENCE) + """ + return min(platforms, key=MEMORY_IMPACT_PLATFORM_PREFERENCE.index) + + +def _select_platform_by_count( + platform_counts: Counter[Platform], +) -> Platform: + """Select platform by count, using MEMORY_IMPACT_PLATFORM_PREFERENCE as tiebreaker. + + Args: + platform_counts: Counter mapping platforms to their counts + + Returns: + Platform with highest count, breaking ties by preference order + """ + return min( + platform_counts.keys(), + key=lambda p: ( + -platform_counts[p], # Negative to prefer higher counts + MEMORY_IMPACT_PLATFORM_PREFERENCE.index(p), + ), + ) + + +def _detect_platform_hint_from_filename(filename: str) -> Platform | None: + """Detect platform hint from filename patterns. + + Detects platform-specific files using patterns like: + - wifi_component_esp_idf.cpp, *_idf.h -> ESP32 IDF variants + - wifi_component_esp8266.cpp, *_esp8266.h -> ESP8266_ARD + - *_esp32*.cpp -> ESP32 IDF (generic) + - *_libretiny.cpp, *_retiny.* -> LibreTiny (not in preference list) + - *_pico.cpp, *_rp2040.* -> RP2040 (not in preference list) + + Args: + filename: File path to check + + Returns: + Platform enum if a specific platform is detected, None otherwise + """ + filename_lower = filename.lower() + + # ESP-IDF platforms (check specific variants first) + if "esp_idf" in filename_lower or "_idf" in filename_lower: + # Check for specific ESP32 variants + if "c6" in filename_lower or "esp32c6" in filename_lower: + return Platform.ESP32_C6_IDF + if "c3" in filename_lower or "esp32c3" in filename_lower: + return Platform.ESP32_C3_IDF + if "s2" in filename_lower or "esp32s2" in filename_lower: + return Platform.ESP32_S2_IDF + if "s3" in filename_lower or "esp32s3" in filename_lower: + return Platform.ESP32_S3_IDF + # Default to ESP32 IDF for generic esp_idf files + return Platform.ESP32_IDF + + # ESP8266 Arduino + if "esp8266" in filename_lower: + return Platform.ESP8266_ARD + + # Generic ESP32 (without _idf suffix, could be Arduino or shared code) + # Prefer IDF as it's the modern platform + if "esp32" in filename_lower: + return Platform.ESP32_IDF + + # LibreTiny and RP2040 are not in MEMORY_IMPACT_PLATFORM_PREFERENCE + # so we don't return them as hints + # if "retiny" in filename_lower or "libretiny" in filename_lower: + # return None # No specific LibreTiny platform preference + # if "pico" in filename_lower or "rp2040" in filename_lower: + # return None # No RP2040 platform preference + + return None + + +def detect_memory_impact_config( + branch: str | None = None, +) -> dict[str, Any]: + """Determine memory impact analysis configuration. + + Always runs memory impact analysis when there are changed components, + building a merged configuration with all changed components (like + test_build_components.py does) to get comprehensive memory analysis. + + When platform-specific files are detected (e.g., wifi_component_esp_idf.cpp), + prefers that platform for testing to ensure the most relevant memory analysis. + + For core C++ file changes without component changes, runs a fallback + analysis using a representative component to measure the impact. 
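+
+    Illustrative return value, assuming hypothetical changed components "mqtt"
+    and "wifi" whose only common preferred test platform is esp32-idf:
+
+        {
+            "should_run": "true",
+            "components": ["mqtt", "wifi"],
+            "platform": Platform.ESP32_IDF,
+            "use_merged_config": "true",
+        }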
+ + Args: + branch: Branch to compare against + + Returns: + Dictionary with memory impact analysis parameters: + - should_run: "true" or "false" + - components: list of component names to analyze + - platform: platform name for the merged build + - use_merged_config: "true" (always use merged config) + """ + + # Get actually changed files (not dependencies) + files = changed_files(branch) + + # Find all changed components (excluding core and base bus components) + # Also collect platform hints from platform-specific filenames + changed_component_set: set[str] = set() + has_core_cpp_changes = False + platform_hints: list[Platform] = [] + + for file in files: + component = get_component_from_path(file) + if component: + # Skip base bus components as they're used across many builds + if component not in BASE_BUS_COMPONENTS: + changed_component_set.add(component) + # Check if this is a platform-specific file + platform_hint = _detect_platform_hint_from_filename(file) + if platform_hint: + platform_hints.append(platform_hint) + elif file.startswith("esphome/") and file.endswith(CPP_FILE_EXTENSIONS): + # Core ESPHome C++ files changed (not component-specific) + # Only C++ files affect memory usage + has_core_cpp_changes = True + + # If no components changed but core C++ changed, test representative component + force_fallback_platform = False + if not changed_component_set and has_core_cpp_changes: + print( + f"Memory impact: No components changed, but core C++ files changed. " + f"Testing {MEMORY_IMPACT_FALLBACK_COMPONENT} component on {MEMORY_IMPACT_FALLBACK_PLATFORM}.", + file=sys.stderr, + ) + changed_component_set.add(MEMORY_IMPACT_FALLBACK_COMPONENT) + force_fallback_platform = True # Use fallback platform (most representative) + elif not changed_component_set: + # No components and no core C++ changes + return {"should_run": "false"} + + # Find components that have tests and collect their supported platforms + components_with_tests: list[str] = [] + component_platforms_map: dict[ + str, set[Platform] + ] = {} # Track which platforms each component supports + + for component in sorted(changed_component_set): + # Look for test files on preferred platforms + test_files = get_component_test_files(component, all_variants=True) + if not test_files: + continue + + # Check if component has tests for any preferred platform + available_platforms = [ + platform + for test_file in test_files + if (platform := parse_test_filename(test_file)[1]) != "all" + and platform in MEMORY_IMPACT_PLATFORM_PREFERENCE + ] + + if not available_platforms: + continue + + component_platforms_map[component] = set(available_platforms) + components_with_tests.append(component) + + # If no components have tests, don't run memory impact + if not components_with_tests: + return {"should_run": "false"} + + # Find common platforms supported by ALL components + # This ensures we can build all components together in a merged config + common_platforms = set(MEMORY_IMPACT_PLATFORM_PREFERENCE) + for component, platforms in component_platforms_map.items(): + common_platforms &= platforms + + # Select the most preferred platform from the common set + # Priority order: + # 1. Platform hints from filenames (e.g., wifi_component_esp_idf.cpp suggests ESP32_IDF) + # 2. Core changes use fallback platform (most representative of codebase) + # 3. Common platforms supported by all components + # 4. 
Most commonly supported platform + if platform_hints: + # Use most common platform hint that's also supported by all components + hint_counts = Counter(platform_hints) + # Filter to only hints that are in common_platforms (if any common platforms exist) + valid_hints = ( + [h for h in hint_counts if h in common_platforms] + if common_platforms + else list(hint_counts.keys()) + ) + if valid_hints: + platform = _select_platform_by_count( + Counter({p: hint_counts[p] for p in valid_hints}) + ) + elif common_platforms: + # Hints exist but none match common platforms, use common platform logic + platform = _select_platform_by_preference(common_platforms) + else: + # Use the most common hint even if it's not in common platforms + platform = _select_platform_by_count(hint_counts) + elif force_fallback_platform: + platform = MEMORY_IMPACT_FALLBACK_PLATFORM + elif common_platforms: + # Pick the most preferred platform that all components support + platform = _select_platform_by_preference(common_platforms) + else: + # No common platform - pick the most commonly supported platform + # Count how many components support each platform + platform_counts = Counter( + p for platforms in component_platforms_map.values() for p in platforms + ) + platform = _select_platform_by_count(platform_counts) + + # Debug output + print("Memory impact analysis:", file=sys.stderr) + print(f" Changed components: {sorted(changed_component_set)}", file=sys.stderr) + print(f" Components with tests: {components_with_tests}", file=sys.stderr) + print( + f" Component platforms: {dict(sorted(component_platforms_map.items()))}", + file=sys.stderr, + ) + print(f" Platform hints from filenames: {platform_hints}", file=sys.stderr) + print(f" Common platforms: {sorted(common_platforms)}", file=sys.stderr) + print(f" Selected platform: {platform}", file=sys.stderr) + + return { + "should_run": "true", + "components": components_with_tests, + "platform": platform, + "use_merged_config": "true", + } def main() -> None: @@ -245,17 +597,33 @@ def main() -> None: run_clang_tidy = should_run_clang_tidy(args.branch) run_clang_format = should_run_clang_format(args.branch) run_python_linters = should_run_python_linters(args.branch) + changed_cpp_file_count = count_changed_cpp_files(args.branch) - # Get both directly changed and all changed components (with dependencies) in one call - script_path = Path(__file__).parent / "list-components.py" - cmd = [sys.executable, str(script_path), "--changed-with-deps"] - if args.branch: - cmd.extend(["-b", args.branch]) + # Get changed components + # get_changed_components() returns: + # None: Core files changed (need full scan) + # []: No components changed + # [list]: Changed components (already includes dependencies) + changed_components_result = get_changed_components() - result = subprocess.run(cmd, capture_output=True, text=True, check=True) - component_data = json.loads(result.stdout) - directly_changed_components = component_data["directly_changed"] - changed_components = component_data["all_changed"] + # Always analyze component files, even if core files changed + # This is needed for component testing and memory impact analysis + changed = changed_files(args.branch) + component_files = [f for f in changed if filter_component_and_test_files(f)] + + directly_changed_components = get_components_with_dependencies( + component_files, False + ) + + if changed_components_result is None: + # Core files changed - will trigger full clang-tidy scan + # But we still need to track changed components for 
testing and memory analysis + changed_components = get_components_with_dependencies(component_files, True) + is_core_change = True + else: + # Use the result from get_changed_components() which includes dependencies + changed_components = changed_components_result + is_core_change = False # Filter to only components that have test files # Components without tests shouldn't generate CI test jobs @@ -266,11 +634,11 @@ def main() -> None: # Get directly changed components with tests (for isolated testing) # These will be tested WITHOUT --testing-mode in CI to enable full validation # (pin conflicts, etc.) since they contain the actual changes being reviewed - directly_changed_with_tests = [ + directly_changed_with_tests = { component for component in directly_changed_components if _component_has_tests(component) - ] + } # Get dependency-only components (for grouped testing) dependency_only_components = [ @@ -279,19 +647,63 @@ def main() -> None: if component not in directly_changed_components ] + # Detect components for memory impact analysis (merged config) + memory_impact = detect_memory_impact_config(args.branch) + + # Determine clang-tidy mode based on actual files that will be checked + if run_clang_tidy: + # Full scan needed if: hash changed OR core files changed + is_full_scan = _is_clang_tidy_full_scan() or is_core_change + + if is_full_scan: + # Full scan checks all files - always use split mode for efficiency + clang_tidy_mode = "split" + files_to_check_count = -1 # Sentinel value for "all files" + else: + # Targeted scan - calculate actual files that will be checked + # This accounts for component dependencies, not just directly changed files + if changed_components: + # Count C++ files in all changed components (including dependencies) + all_cpp_files = list(git_ls_files(["*.cpp"]).keys()) + component_set = set(changed_components) + files_to_check_count = sum( + 1 + for f in all_cpp_files + if get_component_from_path(f) in component_set + ) + else: + # If no components changed, use the simple count of changed C++ files + files_to_check_count = changed_cpp_file_count + + if files_to_check_count < CLANG_TIDY_SPLIT_THRESHOLD: + clang_tidy_mode = "nosplit" + else: + clang_tidy_mode = "split" + else: + clang_tidy_mode = "disabled" + files_to_check_count = 0 + # Build output + # Determine which C++ unit tests to run + cpp_run_all, cpp_components = determine_cpp_unit_tests(args.branch) + output: dict[str, Any] = { "integration_tests": run_integration, "clang_tidy": run_clang_tidy, + "clang_tidy_mode": clang_tidy_mode, "clang_format": run_clang_format, "python_linters": run_python_linters, "changed_components": changed_components, "changed_components_with_tests": changed_components_with_tests, - "directly_changed_components_with_tests": directly_changed_with_tests, + "directly_changed_components_with_tests": list(directly_changed_with_tests), "dependency_only_components_with_tests": dependency_only_components, "component_test_count": len(changed_components_with_tests), "directly_changed_count": len(directly_changed_with_tests), "dependency_only_count": len(dependency_only_components), + "changed_cpp_file_count": changed_cpp_file_count, + "memory_impact": memory_impact, + "cpp_unit_tests_run_all": cpp_run_all, + "cpp_unit_tests_components": cpp_components, } # Output as JSON diff --git a/script/extract_automations.py b/script/extract_automations.py index 943eb7110a..4e650ce25f 100755 --- a/script/extract_automations.py +++ b/script/extract_automations.py @@ -2,19 +2,14 @@ import json -from 
helpers import git_ls_files +from helpers import get_all_component_files, get_components_with_dependencies from esphome.automation import ACTION_REGISTRY, CONDITION_REGISTRY from esphome.pins import PIN_SCHEMA_REGISTRY -list_components = __import__("list-components") - - if __name__ == "__main__": - files = git_ls_files() - files = filter(list_components.filter_component_files, files) - - components = list_components.get_components(files, True) + files = get_all_component_files() + components = get_components_with_dependencies(files, True) dump = { "actions": sorted(list(ACTION_REGISTRY.keys())), diff --git a/script/helpers.py b/script/helpers.py index 61306b9489..78c11b427e 100644 --- a/script/helpers.py +++ b/script/helpers.py @@ -1,5 +1,6 @@ from __future__ import annotations +from collections.abc import Callable from functools import cache import json import os @@ -7,6 +8,7 @@ import os.path from pathlib import Path import re import subprocess +import sys import time from typing import Any @@ -23,12 +25,33 @@ CPP_FILE_EXTENSIONS = (".cpp", ".h", ".hpp", ".cc", ".cxx", ".c", ".tcc") # Python file extensions PYTHON_FILE_EXTENSIONS = (".py", ".pyi") +# Combined C++ and Python file extensions for convenience +CPP_AND_PYTHON_FILE_EXTENSIONS = (*CPP_FILE_EXTENSIONS, *PYTHON_FILE_EXTENSIONS) + # YAML file extensions YAML_FILE_EXTENSIONS = (".yaml", ".yml") # Component path prefix ESPHOME_COMPONENTS_PATH = "esphome/components/" +# Test components path prefix +ESPHOME_TESTS_COMPONENTS_PATH = "tests/components/" + +# Tuple of component and test paths for efficient startswith checks +COMPONENT_AND_TESTS_PATHS = (ESPHOME_COMPONENTS_PATH, ESPHOME_TESTS_COMPONENTS_PATH) + +# Base bus components - these ARE the bus implementations and should not +# be flagged as needing migration since they are the platform/base components +BASE_BUS_COMPONENTS = { + "i2c", + "spi", + "uart", + "modbus", + "canbus", + "remote_transmitter", + "remote_receiver", +} + def parse_list_components_output(output: str) -> list[str]: """Parse the output from list-components.py script. @@ -46,6 +69,65 @@ def parse_list_components_output(output: str) -> list[str]: return [c.strip() for c in output.strip().split("\n") if c.strip()] +def parse_test_filename(test_file: Path) -> tuple[str, str]: + """Parse test filename to extract test name and platform. + + Test files follow the naming pattern: test..yaml or test-..yaml + + Args: + test_file: Path to test file + + Returns: + Tuple of (test_name, platform) + """ + parts = test_file.stem.split(".") + if len(parts) == 2: + return parts[0], parts[1] # test, platform + return parts[0], "all" + + +def get_component_from_path(file_path: str) -> str | None: + """Extract component name from a file path. + + Args: + file_path: Path to a file (e.g., "esphome/components/wifi/wifi.cpp") + + Returns: + Component name if path is in components directory, None otherwise + """ + if not file_path.startswith(ESPHOME_COMPONENTS_PATH): + return None + parts = file_path.split("/") + if len(parts) >= 3: + return parts[2] + return None + + +def get_component_test_files( + component: str, *, all_variants: bool = False +) -> list[Path]: + """Get test files for a component. + + Args: + component: Component name (e.g., "wifi") + all_variants: If True, returns all test files including variants (test-*.yaml). + If False, returns only base test files (test.*.yaml). + Default is False. 
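+            Illustrative (paths are examples only): with all_variants=True a
+            component "wifi" could match both
+            tests/components/wifi/test.esp32-idf.yaml and a variant file such
+            as tests/components/wifi/test-ap.esp32-idf.yaml; the default
+            matches only the former.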
+ + Returns: + List of test file paths for the component, or empty list if none exist + """ + tests_dir = Path(root_path) / "tests" / "components" / component + if not tests_dir.exists(): + return [] + + if all_variants: + # Match both test.*.yaml and test-*.yaml patterns + return list(tests_dir.glob("test[.-]*.yaml")) + # Match only test.*.yaml (base tests) + return list(tests_dir.glob("test.*.yaml")) + + def styled(color: str | tuple[str, ...], msg: str, reset: bool = True) -> str: prefix = "".join(color) if isinstance(color, tuple) else color suffix = colorama.Style.RESET_ALL if reset else "" @@ -233,7 +315,10 @@ def get_changed_components() -> list[str] | None: for f in changed ) if core_cpp_changed: - print("Core C++/header files changed - will run full clang-tidy scan") + print( + "Core C++/header files changed - will run full clang-tidy scan", + file=sys.stderr, + ) return None # Use list-components.py to get changed components @@ -247,7 +332,10 @@ def get_changed_components() -> list[str] | None: return parse_list_components_output(result.stdout) except subprocess.CalledProcessError: # If the script fails, fall back to full scan - print("Could not determine changed components - will run full clang-tidy scan") + print( + "Could not determine changed components - will run full clang-tidy scan", + file=sys.stderr, + ) return None @@ -299,14 +387,14 @@ def _filter_changed_ci(files: list[str]) -> list[str]: if f in changed and not f.startswith(ESPHOME_COMPONENTS_PATH) ] if not files: - print("No files changed") + print("No files changed", file=sys.stderr) return files # Scenario 3: Specific components changed # Action: Check ALL files in each changed component # Convert component list to set for O(1) lookups component_set = set(components) - print(f"Changed components: {', '.join(sorted(components))}") + print(f"Changed components: {', '.join(sorted(components))}", file=sys.stderr) # The 'files' parameter contains ALL files in the codebase that clang-tidy would check. # We filter this down to only files in the changed components. @@ -314,11 +402,9 @@ def _filter_changed_ci(files: list[str]) -> list[str]: # because changes in one file can affect other files in the same component. filtered_files = [] for f in files: - if f.startswith(ESPHOME_COMPONENTS_PATH): - # Check if file belongs to any of the changed components - parts = f.split("/") - if len(parts) >= 3 and parts[2] in component_set: - filtered_files.append(f) + component = get_component_from_path(f) + if component and component in component_set: + filtered_files.append(f) return filtered_files @@ -579,3 +665,313 @@ def get_components_from_integration_fixtures() -> set[str]: components.add(item["platform"]) return components + + +def filter_component_and_test_files(file_path: str) -> bool: + """Check if a file path is a component or test file. + + Args: + file_path: Path to check + + Returns: + True if the file is in a component or test directory + """ + return file_path.startswith(COMPONENT_AND_TESTS_PATHS) or ( + file_path.startswith(ESPHOME_TESTS_COMPONENTS_PATH) + and file_path.endswith(YAML_FILE_EXTENSIONS) + ) + + +def filter_component_and_test_cpp_files(file_path: str) -> bool: + """Check if a file is a C++ source file in component or test directories. 
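+
+    Illustrative doctest-style examples:
+
+        >>> filter_component_and_test_cpp_files("esphome/components/wifi/wifi_component.cpp")
+        True
+        >>> filter_component_and_test_cpp_files("tests/components/wifi/test_wifi.cpp")
+        True
+        >>> filter_component_and_test_cpp_files("esphome/components/wifi/__init__.py")
+        False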
+ + Args: + file_path: Path to check + + Returns: + True if the file is a C++ source/header file in component or test directories + """ + return file_path.endswith(CPP_FILE_EXTENSIONS) and file_path.startswith( + COMPONENT_AND_TESTS_PATHS + ) + + +def extract_component_names_from_files(files: list[str]) -> list[str]: + """Extract unique component names from a list of file paths. + + Args: + files: List of file paths + + Returns: + List of unique component names (preserves order) + """ + return list( + dict.fromkeys(comp for file in files if (comp := get_component_from_path(file))) + ) + + +def add_item_to_components_graph( + components_graph: dict[str, list[str]], parent: str, child: str +) -> None: + """Add a dependency relationship to the components graph. + + Args: + components_graph: Graph mapping parent components to their children + parent: Parent component name + child: Child component name (dependent) + """ + if not parent.startswith("__") and parent != child: + if parent not in components_graph: + components_graph[parent] = [] + if child not in components_graph[parent]: + components_graph[parent].append(child) + + +def resolve_auto_load( + auto_load: list[str] | Callable[[], list[str]] | Callable[[dict | None], list[str]], + config: dict | None = None, +) -> list[str]: + """Resolve AUTO_LOAD to a list, handling callables with or without config parameter. + + Args: + auto_load: The AUTO_LOAD value (list or callable) + config: Optional config to pass to callable AUTO_LOAD functions + + Returns: + List of component names to auto-load + """ + if not callable(auto_load): + return auto_load + + import inspect + + if inspect.signature(auto_load).parameters: + return auto_load(config) + return auto_load() + + +def create_components_graph() -> dict[str, list[str]]: + """Create a graph of component dependencies. + + Returns: + Dictionary mapping parent components to their children (dependencies) + """ + from pathlib import Path + + from esphome import const + from esphome.core import CORE + from esphome.loader import ComponentManifest, get_component, get_platform + + # The root directory of the repo + root = Path(__file__).parent.parent + components_dir = root / ESPHOME_COMPONENTS_PATH + # Fake some directory so that get_component works + CORE.config_path = root + # Various configuration to capture different outcomes used by `AUTO_LOAD` function. + KEY_CORE = const.KEY_CORE + KEY_TARGET_FRAMEWORK = const.KEY_TARGET_FRAMEWORK + KEY_TARGET_PLATFORM = const.KEY_TARGET_PLATFORM + PLATFORM_ESP32 = const.PLATFORM_ESP32 + PLATFORM_ESP8266 = const.PLATFORM_ESP8266 + + TARGET_CONFIGURATIONS = [ + {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None}, + {KEY_TARGET_FRAMEWORK: "arduino", KEY_TARGET_PLATFORM: None}, + {KEY_TARGET_FRAMEWORK: "esp-idf", KEY_TARGET_PLATFORM: None}, + {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP32}, + {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP8266}, + ] + CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0] + + components_graph = {} + platforms = [] + components: list[tuple[ComponentManifest, str, Path]] = [] + + for path in components_dir.iterdir(): + if not path.is_dir(): + continue + if not (path / "__init__.py").is_file(): + continue + name = path.name + comp = get_component(name) + if comp is None: + raise RuntimeError( + f"Cannot find component {name}. 
Make sure current path is pip installed ESPHome" + ) + + components.append((comp, name, path)) + if comp.is_platform_component: + platforms.append(name) + + platforms = set(platforms) + + for comp, name, path in components: + for dependency in comp.dependencies: + add_item_to_components_graph( + components_graph, dependency.split(".")[0], name + ) + + for target_config in TARGET_CONFIGURATIONS: + CORE.data[KEY_CORE] = target_config + for item in resolve_auto_load(comp.auto_load, config=None): + add_item_to_components_graph(components_graph, item, name) + # restore config + CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0] + + for platform_path in path.iterdir(): + platform_name = platform_path.stem + if platform_name == name or platform_name not in platforms: + continue + platform = get_platform(platform_name, name) + if platform is None: + continue + + add_item_to_components_graph(components_graph, platform_name, name) + + for dependency in platform.dependencies: + add_item_to_components_graph( + components_graph, dependency.split(".")[0], name + ) + + for target_config in TARGET_CONFIGURATIONS: + CORE.data[KEY_CORE] = target_config + for item in resolve_auto_load(platform.auto_load, config={}): + add_item_to_components_graph(components_graph, item, name) + # restore config + CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0] + + return components_graph + + +def find_children_of_component( + components_graph: dict[str, list[str]], component_name: str, depth: int = 0 +) -> list[str]: + """Find all components that depend on the given component (recursively). + + Args: + components_graph: Graph mapping parent components to their children + component_name: Component name to find children for + depth: Current recursion depth (max 10) + + Returns: + List of all dependent component names (may contain duplicates removed at end) + """ + if component_name not in components_graph: + return [] + + children = [] + + for child in components_graph[component_name]: + children.append(child) + if depth < 10: + children.extend( + find_children_of_component(components_graph, child, depth + 1) + ) + # Remove duplicate values + return list(set(children)) + + +def get_components_with_dependencies( + files: list[str], get_dependencies: bool = False +) -> list[str]: + """Get component names from files, optionally including their dependencies. + + Args: + files: List of file paths + get_dependencies: If True, include all dependent components + + Returns: + Sorted list of component names + """ + components = extract_component_names_from_files(files) + + if get_dependencies: + components_graph = create_components_graph() + + all_components = components.copy() + for c in components: + all_components.extend(find_children_of_component(components_graph, c)) + # Remove duplicate values + all_changed_components = list(set(all_components)) + + return sorted(all_changed_components) + + return sorted(components) + + +def get_all_component_files() -> list[str]: + """Get all component and test files from git. + + Returns: + List of all component and test file paths + """ + files = git_ls_files() + return list(filter(filter_component_and_test_files, files)) + + +def get_all_components() -> list[str]: + """Get all component names. + + This function uses git to find all component files and extracts the component names. + It returns the same list as calling list-components.py without arguments. 
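+    The result is a sorted list of the component directory names found under
+    esphome/components/, e.g. entries such as "api", "logger", and "wifi".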
+ + Returns: + List of all component names + """ + return get_components_with_dependencies(get_all_component_files(), False) + + +def core_changed(files: list[str]) -> bool: + """Check if any core C++ or Python files have changed. + + Args: + files: List of file paths to check + + Returns: + True if any core C++ or Python files have changed + """ + return any( + f.startswith("esphome/core/") and f.endswith(CPP_AND_PYTHON_FILE_EXTENSIONS) + for f in files + ) + + +def get_cpp_changed_components(files: list[str]) -> list[str]: + """Get components that have changed C++ files or tests. + + This function analyzes a list of changed files and determines which components + are affected. It handles two scenarios: + + 1. Test files changed (tests/components//*.cpp): + - Adds the component to the affected list + - Only that component needs to be tested + + 2. Component C++ files changed (esphome/components//*): + - Adds the component to the affected list + - Also adds all components that depend on this component (recursively) + - This ensures that changes propagate to dependent components + + Args: + files: List of file paths to analyze (should be C++ files) + + Returns: + Sorted list of component names that need C++ unit tests run + """ + components_graph = create_components_graph() + affected: set[str] = set() + for file in files: + if not file.endswith(CPP_FILE_EXTENSIONS): + continue + if file.startswith(ESPHOME_TESTS_COMPONENTS_PATH): + parts = file.split("/") + if len(parts) >= 4: + component_dir = Path(ESPHOME_TESTS_COMPONENTS_PATH) / parts[2] + if component_dir.is_dir(): + affected.add(parts[2]) + elif file.startswith(ESPHOME_COMPONENTS_PATH): + parts = file.split("/") + if len(parts) >= 4: + component = parts[2] + affected.update(find_children_of_component(components_graph, component)) + affected.add(component) + return sorted(affected) diff --git a/script/helpers_zephyr.py b/script/helpers_zephyr.py index 922f1171b4..f72b335e64 100644 --- a/script/helpers_zephyr.py +++ b/script/helpers_zephyr.py @@ -25,6 +25,7 @@ int main() { return 0;} Path(zephyr_dir / "prj.conf").write_text( """ CONFIG_NEWLIB_LIBC=y +CONFIG_BT=y CONFIG_ADC=y """, encoding="utf-8", diff --git a/script/list-components.py b/script/list-components.py index 9abb2bc345..31a1609f88 100755 --- a/script/list-components.py +++ b/script/list-components.py @@ -1,182 +1,14 @@ #!/usr/bin/env python3 import argparse -from collections.abc import Callable -from pathlib import Path -import sys -from helpers import changed_files, git_ls_files - -from esphome.const import ( - KEY_CORE, - KEY_TARGET_FRAMEWORK, - KEY_TARGET_PLATFORM, - PLATFORM_ESP32, - PLATFORM_ESP8266, +from helpers import ( + changed_files, + filter_component_and_test_cpp_files, + filter_component_and_test_files, + get_all_component_files, + get_components_with_dependencies, + get_cpp_changed_components, ) -from esphome.core import CORE -from esphome.loader import ComponentManifest, get_component, get_platform - - -def filter_component_files(str): - return str.startswith("esphome/components/") | str.startswith("tests/components/") - - -def get_all_component_files() -> list[str]: - """Get all component files from git.""" - files = git_ls_files() - return list(filter(filter_component_files, files)) - - -def extract_component_names_array_from_files_array(files): - components = [] - for file in files: - file_parts = file.split("/") - if len(file_parts) >= 4: - component_name = file_parts[2] - if component_name not in components: - components.append(component_name) - return 
components - - -def add_item_to_components_graph(components_graph, parent, child): - if not parent.startswith("__") and parent != child: - if parent not in components_graph: - components_graph[parent] = [] - if child not in components_graph[parent]: - components_graph[parent].append(child) - - -def resolve_auto_load( - auto_load: list[str] | Callable[[], list[str]] | Callable[[dict | None], list[str]], - config: dict | None = None, -) -> list[str]: - """Resolve AUTO_LOAD to a list, handling callables with or without config parameter. - - Args: - auto_load: The AUTO_LOAD value (list or callable) - config: Optional config to pass to callable AUTO_LOAD functions - - Returns: - List of component names to auto-load - """ - if not callable(auto_load): - return auto_load - - import inspect - - if inspect.signature(auto_load).parameters: - return auto_load(config) - return auto_load() - - -def create_components_graph(): - # The root directory of the repo - root = Path(__file__).parent.parent - components_dir = root / "esphome" / "components" - # Fake some directory so that get_component works - CORE.config_path = root - # Various configuration to capture different outcomes used by `AUTO_LOAD` function. - TARGET_CONFIGURATIONS = [ - {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None}, - {KEY_TARGET_FRAMEWORK: "arduino", KEY_TARGET_PLATFORM: None}, - {KEY_TARGET_FRAMEWORK: "esp-idf", KEY_TARGET_PLATFORM: None}, - {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP32}, - {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP8266}, - ] - CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0] - - components_graph = {} - platforms = [] - components: list[tuple[ComponentManifest, str, Path]] = [] - - for path in components_dir.iterdir(): - if not path.is_dir(): - continue - if not (path / "__init__.py").is_file(): - continue - name = path.name - comp = get_component(name) - if comp is None: - print( - f"Cannot find component {name}. 
Make sure current path is pip installed ESPHome" - ) - sys.exit(1) - - components.append((comp, name, path)) - if comp.is_platform_component: - platforms.append(name) - - platforms = set(platforms) - - for comp, name, path in components: - for dependency in comp.dependencies: - add_item_to_components_graph( - components_graph, dependency.split(".")[0], name - ) - - for target_config in TARGET_CONFIGURATIONS: - CORE.data[KEY_CORE] = target_config - for item in resolve_auto_load(comp.auto_load, config=None): - add_item_to_components_graph(components_graph, item, name) - # restore config - CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0] - - for platform_path in path.iterdir(): - platform_name = platform_path.stem - if platform_name == name or platform_name not in platforms: - continue - platform = get_platform(platform_name, name) - if platform is None: - continue - - add_item_to_components_graph(components_graph, platform_name, name) - - for dependency in platform.dependencies: - add_item_to_components_graph( - components_graph, dependency.split(".")[0], name - ) - - for target_config in TARGET_CONFIGURATIONS: - CORE.data[KEY_CORE] = target_config - for item in resolve_auto_load(platform.auto_load, config={}): - add_item_to_components_graph(components_graph, item, name) - # restore config - CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0] - - return components_graph - - -def find_children_of_component(components_graph, component_name, depth=0): - if component_name not in components_graph: - return [] - - children = [] - - for child in components_graph[component_name]: - children.append(child) - if depth < 10: - children.extend( - find_children_of_component(components_graph, child, depth + 1) - ) - # Remove duplicate values - return list(set(children)) - - -def get_components(files: list[str], get_dependencies: bool = False): - components = extract_component_names_array_from_files_array(files) - - if get_dependencies: - components_graph = create_components_graph() - - all_components = components.copy() - for c in components: - all_components.extend(find_children_of_component(components_graph, c)) - # Remove duplicate values - all_changed_components = list(set(all_components)) - - return sorted(all_changed_components) - - return sorted(components) def main(): @@ -203,16 +35,29 @@ def main(): parser.add_argument( "-b", "--branch", help="Branch to compare changed files against" ) + parser.add_argument( + "--cpp-changed", + action="store_true", + help="List components with changed C++ files", + ) args = parser.parse_args() if args.branch and not ( - args.changed or args.changed_direct or args.changed_with_deps + args.changed + or args.changed_direct + or args.changed_with_deps + or args.cpp_changed ): parser.error( - "--branch requires --changed, --changed-direct, or --changed-with-deps" + "--branch requires --changed, --changed-direct, --changed-with-deps, or --cpp-changed" ) - if args.changed or args.changed_direct or args.changed_with_deps: + if ( + args.changed + or args.changed_direct + or args.changed_with_deps + or args.cpp_changed + ): # When --changed* is passed, only get the changed files changed = changed_files(args.branch) @@ -232,6 +77,11 @@ def main(): # - --changed-with-deps: Used by CI test determination (script/determine-jobs.py) # Returns: Components with code changes + their dependencies (not infrastructure) # Reason: CI needs to test changed components and their dependents + # + # - --cpp-changed: Used by CI to determine if any C++ files changed (script/determine-jobs.py) + # 
Returns: Only components with changed C++ files + # Reason: Only components with C++ changes need C++ testing + base_test_changed = any( "tests/test_build_components" in file for file in changed ) @@ -244,7 +94,7 @@ def main(): # Only look at changed component files (ignore infrastructure changes) # For --changed-direct: only actual component code changes matter (for isolation) # For --changed-with-deps: only actual component code changes matter (for testing) - files = [f for f in changed if filter_component_files(f)] + files = [f for f in changed if filter_component_and_test_files(f)] else: # Get all component files files = get_all_component_files() @@ -253,8 +103,8 @@ def main(): # Return JSON with both directly changed and all changed components import json - directly_changed = get_components(files, False) - all_changed = get_components(files, True) + directly_changed = get_components_with_dependencies(files, False) + all_changed = get_components_with_dependencies(files, True) output = { "directly_changed": directly_changed, "all_changed": all_changed, @@ -262,11 +112,16 @@ def main(): print(json.dumps(output)) elif args.changed_direct: # Return only directly changed components (without dependencies) - for c in get_components(files, False): + for c in get_components_with_dependencies(files, False): + print(c) + elif args.cpp_changed: + # Only look at changed cpp files + files = list(filter(filter_component_and_test_cpp_files, changed)) + for c in get_cpp_changed_components(files): print(c) else: # Return all changed components (with dependencies) - default behavior - for c in get_components(files, args.changed): + for c in get_components_with_dependencies(files, args.changed): print(c) diff --git a/script/split_components_for_ci.py b/script/split_components_for_ci.py index dff46d3619..87da540d43 100755 --- a/script/split_components_for_ci.py +++ b/script/split_components_for_ci.py @@ -28,6 +28,7 @@ from script.analyze_component_buses import ( create_grouping_signature, merge_compatible_bus_groups, ) +from script.helpers import get_component_test_files # Weighting for batch creation # Isolated components can't be grouped/merged, so they count as 10x @@ -45,17 +46,12 @@ def has_test_files(component_name: str, tests_dir: Path) -> bool: Args: component_name: Name of the component - tests_dir: Path to tests/components directory + tests_dir: Path to tests/components directory (unused, kept for compatibility) Returns: - True if the component has test.*.yaml files + True if the component has test.*.yaml or test-*.yaml files """ - component_dir = tests_dir / component_name - if not component_dir.exists() or not component_dir.is_dir(): - return False - - # Check for test.*.yaml files - return any(component_dir.glob("test.*.yaml")) + return bool(get_component_test_files(component_name, all_variants=True)) def create_intelligent_batches( @@ -122,8 +118,13 @@ def create_intelligent_batches( continue # Get signature from any platform (they should all have the same buses) - # Components not in component_buses were filtered out by has_test_files check - comp_platforms = component_buses[component] + # Components not in component_buses may only have variant-specific tests + comp_platforms = component_buses.get(component) + if not comp_platforms: + # Component has tests but no analyzable base config - treat as no buses + signature_groups[(ALL_PLATFORMS, NO_BUSES_SIGNATURE)].append(component) + continue + for platform, buses in comp_platforms.items(): if buses: signature = 
create_grouping_signature({platform: buses}, platform) diff --git a/script/templates/ci_memory_impact_comment_template.j2 b/script/templates/ci_memory_impact_comment_template.j2 new file mode 100644 index 0000000000..9fbf78e99f --- /dev/null +++ b/script/templates/ci_memory_impact_comment_template.j2 @@ -0,0 +1,27 @@ +{{ comment_marker }} +## Memory Impact Analysis + +**Components:** {{ components_str }} +**Platform:** `{{ platform }}` + +| Metric | Target Branch | This PR | Change | +|--------|--------------|---------|--------| +| **RAM** | {{ target_ram }} | {{ pr_ram }} | {{ ram_change }} | +| **Flash** | {{ target_flash }} | {{ pr_flash }} | {{ flash_change }} | +{% if component_breakdown %} +{{ component_breakdown }} +{% endif %} +{% if symbol_changes %} +{{ symbol_changes }} +{% endif %} +{%- if target_cache_hit %} + +> ⚡ Target branch analysis was loaded from cache (build skipped for faster CI). +{%- endif %} + +--- +> **Note:** This analysis measures **static RAM and Flash usage** only (compile-time allocation). +> **Dynamic memory (heap)** cannot be measured automatically. +> **⚠️ You must test this PR on a real device** to measure free heap and ensure no runtime memory issues. + +*This analysis runs automatically when components change. Memory usage is measured from {{ config_note }}.* diff --git a/script/templates/ci_memory_impact_component_breakdown.j2 b/script/templates/ci_memory_impact_component_breakdown.j2 new file mode 100644 index 0000000000..a781e5c546 --- /dev/null +++ b/script/templates/ci_memory_impact_component_breakdown.j2 @@ -0,0 +1,15 @@ + +
+📊 Component Memory Breakdown + +| Component | Target Flash | PR Flash | Change | +|-----------|--------------|----------|--------| +{% for comp, target_flash, pr_flash, delta in changed_components[:max_rows] -%} +{% set threshold = component_change_threshold if comp.startswith("[esphome]") else none -%} +| `{{ comp }}` | {{ target_flash|format_bytes }} | {{ pr_flash|format_bytes }} | {{ format_change(target_flash, pr_flash, threshold=threshold) }} | +{% endfor -%} +{% if changed_components|length > max_rows -%} +| ... | ... | ... | *({{ changed_components|length - max_rows }} more components not shown)* | +{% endif -%} + +
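The breakdown template above expects a `changed_components` list of `(component, target_flash, pr_flash, delta)` tuples, a `format_bytes` filter, and a `format_change` callable, plus `max_rows` and `component_change_threshold`. A minimal rendering sketch follows; the helper implementations and sample data here are assumptions for illustration, not the actual CI helpers — only the template path and the variable names come from the template itself.

```python
# Sketch only: render the component breakdown template with assumed helpers.
# `format_bytes` and `format_change` below are illustrative, not the real CI code.
from jinja2 import Environment, FileSystemLoader


def format_bytes(num: float) -> str:
    """Format a byte count for display (assumed behavior)."""
    for unit in ("B", "KiB", "MiB"):
        if abs(num) < 1024:
            return f"{num:,.0f} {unit}"
        num /= 1024
    return f"{num:,.2f} GiB"


def format_change(target: int, pr: int, threshold: int | None = None) -> str:
    """Format the delta between target and PR sizes (assumed behavior)."""
    delta = pr - target
    if threshold is not None and abs(delta) < threshold:
        return "no significant change"
    pct = f" ({delta / target:+.2%})" if target else ""
    return f"{delta:+,} bytes{pct}"


env = Environment(loader=FileSystemLoader("script/templates"))
env.filters["format_bytes"] = format_bytes
breakdown = env.get_template("ci_memory_impact_component_breakdown.j2").render(
    changed_components=[("wifi", 51200, 51712, 512), ("api", 30208, 30080, -128)],
    max_rows=20,
    component_change_threshold=None,
    format_change=format_change,
)
print(breakdown)
```

Passing `format_change` in the render context rather than registering it as a filter matches how the template invokes it as a plain function with a keyword `threshold` argument.
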
diff --git a/script/templates/ci_memory_impact_macros.j2 b/script/templates/ci_memory_impact_macros.j2 new file mode 100644 index 0000000000..9fb346a7c5 --- /dev/null +++ b/script/templates/ci_memory_impact_macros.j2 @@ -0,0 +1,8 @@ +{#- Macro for formatting symbol names in tables -#} +{%- macro format_symbol(symbol, max_length, truncate_length) -%} +{%- if symbol|length <= max_length -%} +`{{ symbol }}` +{%- else -%} +
{{ symbol[:truncate_length] }}...{{ symbol }}
+{%- endif -%} +{%- endmacro -%} diff --git a/script/templates/ci_memory_impact_symbol_changes.j2 b/script/templates/ci_memory_impact_symbol_changes.j2 new file mode 100644 index 0000000000..60f2f50e48 --- /dev/null +++ b/script/templates/ci_memory_impact_symbol_changes.j2 @@ -0,0 +1,51 @@ +{%- from 'ci_memory_impact_macros.j2' import format_symbol -%} + +
+🔍 Symbol-Level Changes (click to expand) + +{% if changed_symbols %} + +### Changed Symbols + +| Symbol | Target Size | PR Size | Change | +|--------|-------------|---------|--------| +{% for symbol, target_size, pr_size, delta in changed_symbols[:max_changed_rows] -%} +| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ target_size|format_bytes }} | {{ pr_size|format_bytes }} | {{ format_change(target_size, pr_size) }} | +{% endfor -%} +{% if changed_symbols|length > max_changed_rows -%} +| ... | ... | ... | *({{ changed_symbols|length - max_changed_rows }} more changed symbols not shown)* | +{% endif -%} + +{% endif %} +{% if new_symbols %} + +### New Symbols (top {{ max_new_rows }}) + +| Symbol | Size | +|--------|------| +{% for symbol, size in new_symbols[:max_new_rows] -%} +| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ size|format_bytes }} | +{% endfor -%} +{% if new_symbols|length > max_new_rows -%} +{% set total_new_size = new_symbols|sum(attribute=1) -%} +| *{{ new_symbols|length - max_new_rows }} more new symbols...* | *Total: {{ total_new_size|format_bytes }}* | +{% endif -%} + +{% endif %} +{% if removed_symbols %} + +### Removed Symbols (top {{ max_removed_rows }}) + +| Symbol | Size | +|--------|------| +{% for symbol, size in removed_symbols[:max_removed_rows] -%} +| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ size|format_bytes }} | +{% endfor -%} +{% if removed_symbols|length > max_removed_rows -%} +{% set total_removed_size = removed_symbols|sum(attribute=1) -%} +| *{{ removed_symbols|length - max_removed_rows }} more removed symbols...* | *Total: {{ total_removed_size|format_bytes }}* | +{% endif -%} + +{% endif %} + +
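The symbol tables above iterate over `(symbol, target_size, pr_size, delta)` and `(symbol, size)` tuples. Below is a sketch of how such lists could be derived from two `{symbol: size}` mappings taken from the target-branch and PR analyses; the function name `diff_symbols`, the ordering, and the sample symbols are hypothetical — the real analysis script may compute these differently.

```python
# Sketch only: derive the changed/new/removed symbol lists consumed by the
# template above from two {symbol: size} mappings (target branch vs. PR).
def diff_symbols(
    target: dict[str, int], pr: dict[str, int]
) -> tuple[
    list[tuple[str, int, int, int]],  # changed: (symbol, target_size, pr_size, delta)
    list[tuple[str, int]],            # new: (symbol, size)
    list[tuple[str, int]],            # removed: (symbol, size)
]:
    changed = [
        (sym, target[sym], pr[sym], pr[sym] - target[sym])
        for sym in target.keys() & pr.keys()
        if pr[sym] != target[sym]
    ]
    new = [(sym, size) for sym, size in pr.items() if sym not in target]
    removed = [(sym, size) for sym, size in target.items() if sym not in pr]
    # Put the biggest movers first so the row limits keep the most relevant entries
    changed.sort(key=lambda row: abs(row[3]), reverse=True)
    new.sort(key=lambda row: row[1], reverse=True)
    removed.sort(key=lambda row: row[1], reverse=True)
    return changed, new, removed


# Example usage with made-up symbols:
changed, new, removed = diff_symbols(
    {"esphome::Application::loop()": 1200, "old_fn()": 96},
    {"esphome::Application::loop()": 1248, "new_fn()": 64},
)
```
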
diff --git a/script/test_build_components.py b/script/test_build_components.py index df092c091d..e369b0364e 100755 --- a/script/test_build_components.py +++ b/script/test_build_components.py @@ -39,6 +39,7 @@ from script.analyze_component_buses import ( merge_compatible_bus_groups, uses_local_file_references, ) +from script.helpers import get_component_test_files from script.merge_component_configs import merge_component_configs @@ -82,13 +83,14 @@ def show_disk_space_if_ci(esphome_command: str) -> None: def find_component_tests( - components_dir: Path, component_pattern: str = "*" + components_dir: Path, component_pattern: str = "*", base_only: bool = False ) -> dict[str, list[Path]]: """Find all component test files. Args: components_dir: Path to tests/components directory component_pattern: Glob pattern for component names + base_only: If True, only find base test files (test.*.yaml), not variant files (test-*.yaml) Returns: Dictionary mapping component name to list of test files @@ -99,9 +101,10 @@ def find_component_tests( if not comp_dir.is_dir(): continue - # Find test files matching test.*.yaml or test-*.yaml patterns - for test_file in comp_dir.glob("test[.-]*.yaml"): - component_tests[comp_dir.name].append(test_file) + # Get test files using helper function + test_files = get_component_test_files(comp_dir.name, all_variants=not base_only) + if test_files: + component_tests[comp_dir.name] = test_files return dict(component_tests) @@ -931,6 +934,7 @@ def test_components( continue_on_fail: bool, enable_grouping: bool = True, isolated_components: set[str] | None = None, + base_only: bool = False, ) -> int: """Test components with optional intelligent grouping. @@ -944,6 +948,7 @@ def test_components( These are tested WITHOUT --testing-mode to enable full validation (pin conflicts, etc). This is used in CI for directly changed components to catch issues that would be missed with --testing-mode. + base_only: If True, only test base test files (test.*.yaml), not variant files (test-*.yaml) Returns: Exit code (0 for success, 1 for failure) @@ -961,11 +966,33 @@ def test_components( # Find all component tests all_tests = {} for pattern in component_patterns: - all_tests.update(find_component_tests(tests_dir, pattern)) + # Skip empty patterns (happens when components list is empty string) + if not pattern: + continue + all_tests.update(find_component_tests(tests_dir, pattern, base_only)) + # If no components found, build a reference configuration for baseline comparison + # Create a synthetic "empty" component test that will build just the base config if not all_tests: print(f"No components found matching: {component_patterns}") - return 1 + print( + "Building reference configuration with no components for baseline comparison..." + ) + + # Create empty test files for each platform (or filtered platform) + reference_tests: list[Path] = [] + for platform_name, base_file in platform_bases.items(): + if platform_filter and not platform_name.startswith(platform_filter): + continue + # Create an empty test file named to match the platform + empty_test_file = build_dir / f"reference.{platform_name}.yaml" + empty_test_file.write_text( + "# Empty component test for baseline reference\n" + ) + reference_tests.append(empty_test_file) + + # Add to all_tests dict with component name "reference" + all_tests["reference"] = reference_tests print(f"Found {len(all_tests)} components to test") @@ -1122,6 +1149,11 @@ def main() -> int: "These are tested WITHOUT --testing-mode to enable full validation. 
" "Used in CI for directly changed components to catch pin conflicts and other issues.", ) + parser.add_argument( + "--base-only", + action="store_true", + help="Only test base test files (test.*.yaml), not variant files (test-*.yaml)", + ) args = parser.parse_args() @@ -1140,6 +1172,7 @@ def main() -> int: continue_on_fail=args.continue_on_fail, enable_grouping=not args.no_grouping, isolated_components=isolated_components, + base_only=args.base_only, ) diff --git a/tests/component_tests/packages/test_packages.py b/tests/component_tests/packages/test_packages.py index 4712daad0d..d66ca58a69 100644 --- a/tests/component_tests/packages/test_packages.py +++ b/tests/component_tests/packages/test_packages.py @@ -6,6 +6,7 @@ from unittest.mock import MagicMock, patch import pytest from esphome.components.packages import do_packages_pass +from esphome.config import resolve_extend_remove from esphome.config_helpers import Extend, Remove import esphome.config_validation as cv from esphome.const import ( @@ -64,13 +65,20 @@ def fixture_basic_esphome(): return {CONF_NAME: TEST_DEVICE_NAME, CONF_PLATFORM: TEST_PLATFORM} +def packages_pass(config): + """Wrapper around packages_pass that also resolves Extend and Remove.""" + config = do_packages_pass(config) + resolve_extend_remove(config) + return config + + def test_package_unused(basic_esphome, basic_wifi): """ Ensures do_package_pass does not change a config if packages aren't used. """ config = {CONF_ESPHOME: basic_esphome, CONF_WIFI: basic_wifi} - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == config @@ -83,7 +91,7 @@ def test_package_invalid_dict(basic_esphome, basic_wifi): config = {CONF_ESPHOME: basic_esphome, CONF_PACKAGES: basic_wifi | {CONF_URL: ""}} with pytest.raises(cv.Invalid): - do_packages_pass(config) + packages_pass(config) def test_package_include(basic_wifi, basic_esphome): @@ -99,7 +107,7 @@ def test_package_include(basic_wifi, basic_esphome): expected = {CONF_ESPHOME: basic_esphome, CONF_WIFI: basic_wifi} - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -124,7 +132,7 @@ def test_package_append(basic_wifi, basic_esphome): }, } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -148,7 +156,7 @@ def test_package_override(basic_wifi, basic_esphome): }, } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -177,7 +185,7 @@ def test_multiple_package_order(): }, } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -233,7 +241,7 @@ def test_package_list_merge(): ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -311,7 +319,7 @@ def test_package_list_merge_by_id(): ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -350,13 +358,13 @@ def test_package_merge_by_id_with_list(): ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected def test_package_merge_by_missing_id(): """ - Ensures that components with missing IDs are not merged. + Ensures that a validation error is thrown when trying to extend a missing ID. 
""" config = { @@ -379,25 +387,15 @@ def test_package_merge_by_missing_id(): ], } - expected = { - CONF_SENSOR: [ - { - CONF_ID: TEST_SENSOR_ID_1, - CONF_FILTERS: [{CONF_MULTIPLY: 42.0}], - }, - { - CONF_ID: TEST_SENSOR_ID_1, - CONF_FILTERS: [{CONF_MULTIPLY: 10.0}], - }, - { - CONF_ID: Extend(TEST_SENSOR_ID_2), - CONF_FILTERS: [{CONF_OFFSET: 146.0}], - }, - ] - } + error_raised = False + try: + packages_pass(config) + assert False, "Expected validation error for missing ID" + except cv.Invalid as err: + error_raised = True + assert err.path == [CONF_SENSOR, 2] - actual = do_packages_pass(config) - assert actual == expected + assert error_raised def test_package_list_remove_by_id(): @@ -447,7 +445,7 @@ def test_package_list_remove_by_id(): ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -493,7 +491,7 @@ def test_multiple_package_list_remove_by_id(): ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -514,7 +512,7 @@ def test_package_dict_remove_by_id(basic_wifi, basic_esphome): CONF_ESPHOME: basic_esphome, } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -545,7 +543,6 @@ def test_package_remove_by_missing_id(): } expected = { - "missing_key": Remove(), CONF_SENSOR: [ { CONF_ID: TEST_SENSOR_ID_1, @@ -555,14 +552,10 @@ def test_package_remove_by_missing_id(): CONF_ID: TEST_SENSOR_ID_1, CONF_FILTERS: [{CONF_MULTIPLY: 10.0}], }, - { - CONF_ID: Remove(TEST_SENSOR_ID_2), - CONF_FILTERS: [{CONF_OFFSET: 146.0}], - }, ], } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -634,7 +627,7 @@ def test_remote_packages_with_files_list( ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected @@ -730,5 +723,5 @@ def test_remote_packages_with_files_and_vars( ] } - actual = do_packages_pass(config) + actual = packages_pass(config) assert actual == expected diff --git a/tests/components/.gitignore b/tests/components/.gitignore new file mode 100644 index 0000000000..d8b4157aef --- /dev/null +++ b/tests/components/.gitignore @@ -0,0 +1,5 @@ +# Gitignore settings for ESPHome +# This is an example and may include too much for your use-case. +# You can modify this file to suit your needs. +/.esphome/ +/secrets.yaml diff --git a/tests/components/README.md b/tests/components/README.md new file mode 100644 index 0000000000..0901f2ef17 --- /dev/null +++ b/tests/components/README.md @@ -0,0 +1,32 @@ +# How to write C++ ESPHome unit tests + +1. Locate the folder with your component or create a new one with the same name as the component. +2. Write the tests. You can add as many `.cpp` and `.h` files as you need to organize your tests. + +**IMPORTANT**: wrap all your testing code in a unique namespace to avoid linker collisions when compiling +testing binaries that combine many components. By convention, this unique namespace is `esphome::component::testing` +(where "component" is the component under test), for example: `esphome::uart::testing`. + + +## Running component unit tests + +(from the repository root) +```bash +./script/cpp_unit_test.py component1 component2 ... +``` + +The above will compile and run the provided components and their tests. 
+ +To run all tests, you can invoke `cpp_unit_test.py` with the special `--all` flag: + +```bash +./script/cpp_unit_test.py --all +``` + +To run a specific test suite, you can provide a Google Test filter: + +```bash +GTEST_FILTER='UART*' ./script/cpp_unit_test.py uart modbus +``` + +The process will return `0` for success or nonzero for failure. In case of failure, the errors will be printed out to the console. diff --git a/tests/components/binary_sensor/common.yaml b/tests/components/binary_sensor/common.yaml index ed6322768f..e3fd159b08 100644 --- a/tests/components/binary_sensor/common.yaml +++ b/tests/components/binary_sensor/common.yaml @@ -37,3 +37,102 @@ binary_sensor: format: "New state is %s" args: ['x.has_value() ? ONOFF(x) : "Unknown"'] - binary_sensor.invalidate_state: some_binary_sensor + + # Test autorepeat with default configuration (no timings) + - platform: template + id: autorepeat_default + name: "Autorepeat Default" + filters: + - autorepeat: + + # Test autorepeat with single timing entry + - platform: template + id: autorepeat_single + name: "Autorepeat Single" + filters: + - autorepeat: + - delay: 2s + time_off: 200ms + time_on: 800ms + + # Test autorepeat with three timing entries + - platform: template + id: autorepeat_multiple + name: "Autorepeat Multiple" + filters: + - autorepeat: + - delay: 500ms + time_off: 50ms + time_on: 950ms + - delay: 2s + time_off: 100ms + time_on: 900ms + - delay: 10s + time_off: 200ms + time_on: 800ms + + # Test on_multi_click with single click + - platform: template + id: multi_click_single + name: "Multi Click Single" + on_multi_click: + - timing: + - state: true + min_length: 50ms + max_length: 350ms + then: + - logger.log: "Single click detected" + + # Test on_multi_click with double click + - platform: template + id: multi_click_double + name: "Multi Click Double" + on_multi_click: + - timing: + - state: true + min_length: 50ms + max_length: 350ms + - state: false + min_length: 50ms + max_length: 350ms + - state: true + min_length: 50ms + max_length: 350ms + then: + - logger.log: "Double click detected" + + # Test on_multi_click with complex pattern (5 events) + - platform: template + id: multi_click_complex + name: "Multi Click Complex" + on_multi_click: + - timing: + - state: true + min_length: 50ms + max_length: 350ms + - state: false + min_length: 50ms + max_length: 350ms + - state: true + min_length: 50ms + max_length: 350ms + - state: false + min_length: 50ms + max_length: 350ms + - state: true + min_length: 50ms + then: + - logger.log: "Complex pattern detected" + + # Test on_multi_click with custom invalid_cooldown + - platform: template + id: multi_click_cooldown + name: "Multi Click Cooldown" + on_multi_click: + - timing: + - state: true + min_length: 100ms + max_length: 500ms + invalid_cooldown: 2s + then: + - logger.log: "Click with custom cooldown" diff --git a/tests/components/ble_nus/test.nrf52-adafruit.yaml b/tests/components/ble_nus/test.nrf52-adafruit.yaml new file mode 100644 index 0000000000..20eec16956 --- /dev/null +++ b/tests/components/ble_nus/test.nrf52-adafruit.yaml @@ -0,0 +1,2 @@ +ble_nus: + type: logs diff --git a/tests/components/ble_nus/test.nrf52-mcumgr.yaml b/tests/components/ble_nus/test.nrf52-mcumgr.yaml new file mode 100644 index 0000000000..20eec16956 --- /dev/null +++ b/tests/components/ble_nus/test.nrf52-mcumgr.yaml @@ -0,0 +1,2 @@ +ble_nus: + type: logs diff --git a/tests/components/climate/common.yaml b/tests/components/climate/common.yaml new file mode 100644 index 0000000000..ff405b68e2 
--- /dev/null +++ b/tests/components/climate/common.yaml @@ -0,0 +1,31 @@ +switch: + - platform: template + id: climate_heater_switch + optimistic: true + - platform: template + id: climate_cooler_switch + optimistic: true + +sensor: + - platform: template + id: climate_temperature_sensor + lambda: |- + return 21.5; + update_interval: 60s + +climate: + - platform: bang_bang + id: climate_test_climate + name: Test Climate + sensor: climate_temperature_sensor + default_target_temperature_low: 18°C + default_target_temperature_high: 24°C + idle_action: + - switch.turn_off: climate_heater_switch + - switch.turn_off: climate_cooler_switch + cool_action: + - switch.turn_on: climate_cooler_switch + - switch.turn_off: climate_heater_switch + heat_action: + - switch.turn_on: climate_heater_switch + - switch.turn_off: climate_cooler_switch diff --git a/tests/components/climate/test.esp8266-ard.yaml b/tests/components/climate/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/climate/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml diff --git a/tests/components/esp32_improv/common.yaml b/tests/components/esp32_improv/common.yaml index 7eb3f9c0be..7dc2f7b6c7 100644 --- a/tests/components/esp32_improv/common.yaml +++ b/tests/components/esp32_improv/common.yaml @@ -16,3 +16,4 @@ esp32_improv: authorizer: io0_button authorized_duration: 1min status_indicator: built_in_led + next_url: "https://example.com/setup?device={{device_name}}&ip={{ip_address}}&version={{esphome_version}}" diff --git a/tests/components/espnow/common.yaml b/tests/components/espnow/common.yaml index abb31c12b8..895ffb9d15 100644 --- a/tests/components/espnow/common.yaml +++ b/tests/components/espnow/common.yaml @@ -1,4 +1,5 @@ espnow: + id: espnow_component auto_add_peer: false channel: 1 peers: @@ -50,3 +51,26 @@ espnow: - format_mac_address_pretty(info.src_addr).c_str() - format_hex_pretty(data, size).c_str() - info.rx_ctrl->rssi + +packet_transport: + - platform: espnow + id: transport1 + espnow_id: espnow_component + peer_address: "FF:FF:FF:FF:FF:FF" + encryption: + key: "0123456789abcdef0123456789abcdef" + sensors: + - temp_sensor + providers: + - name: test_provider + encryption: + key: "0123456789abcdef0123456789abcdef" + +sensor: + - platform: internal_temperature + id: temp_sensor + + - platform: packet_transport + provider: test_provider + remote_id: temp_sensor + id: remote_temp diff --git a/tests/components/ethernet/common-dp83848.yaml b/tests/components/ethernet/common-dp83848.yaml index 7cedfeaf08..f9069c5fb9 100644 --- a/tests/components/ethernet/common-dp83848.yaml +++ b/tests/components/ethernet/common-dp83848.yaml @@ -1,12 +1,12 @@ ethernet: type: DP83848 mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet/common-ip101.yaml b/tests/components/ethernet/common-ip101.yaml index 2dece15171..cea7a5cc35 100644 --- a/tests/components/ethernet/common-ip101.yaml +++ b/tests/components/ethernet/common-ip101.yaml @@ -1,12 +1,12 @@ ethernet: type: IP101 mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet/common-jl1101.yaml b/tests/components/ethernet/common-jl1101.yaml index b6ea884102..7b0a2dfdc4 100644 --- 
a/tests/components/ethernet/common-jl1101.yaml +++ b/tests/components/ethernet/common-jl1101.yaml @@ -1,12 +1,12 @@ ethernet: type: JL1101 mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet/common-ksz8081.yaml b/tests/components/ethernet/common-ksz8081.yaml index f70d42319e..65541832c2 100644 --- a/tests/components/ethernet/common-ksz8081.yaml +++ b/tests/components/ethernet/common-ksz8081.yaml @@ -1,12 +1,12 @@ ethernet: type: KSZ8081 mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet/common-ksz8081rna.yaml b/tests/components/ethernet/common-ksz8081rna.yaml index 18efdae0e1..f04cba15b2 100644 --- a/tests/components/ethernet/common-ksz8081rna.yaml +++ b/tests/components/ethernet/common-ksz8081rna.yaml @@ -1,12 +1,12 @@ ethernet: type: KSZ8081RNA mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet/common-lan8670.yaml b/tests/components/ethernet/common-lan8670.yaml index ec2f24273d..fb751ebd23 100644 --- a/tests/components/ethernet/common-lan8670.yaml +++ b/tests/components/ethernet/common-lan8670.yaml @@ -1,12 +1,12 @@ ethernet: type: LAN8670 mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet/common-lan8720.yaml b/tests/components/ethernet/common-lan8720.yaml index 204c1d9210..838d57df28 100644 --- a/tests/components/ethernet/common-lan8720.yaml +++ b/tests/components/ethernet/common-lan8720.yaml @@ -1,12 +1,12 @@ ethernet: type: LAN8720 mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet/common-rtl8201.yaml b/tests/components/ethernet/common-rtl8201.yaml index 8b9f2b86f2..0e7cbe73c6 100644 --- a/tests/components/ethernet/common-rtl8201.yaml +++ b/tests/components/ethernet/common-rtl8201.yaml @@ -1,12 +1,12 @@ ethernet: type: RTL8201 mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/ethernet_info/common.yaml b/tests/components/ethernet_info/common.yaml index f45f345316..b720521d10 100644 --- a/tests/components/ethernet_info/common.yaml +++ b/tests/components/ethernet_info/common.yaml @@ -1,12 +1,12 @@ ethernet: type: LAN8720 mdc_pin: 23 - mdio_pin: 25 + mdio_pin: 32 clk: pin: 0 mode: CLK_EXT_IN phy_addr: 0 - power_pin: 26 + power_pin: 33 manual_ip: static_ip: 192.168.178.56 gateway: 192.168.178.1 diff --git a/tests/components/fan/common.yaml b/tests/components/fan/common.yaml new file mode 100644 index 0000000000..55c2a656fd --- /dev/null +++ b/tests/components/fan/common.yaml @@ -0,0 +1,11 @@ +fan: + - platform: template + id: test_fan + name: "Test Fan" + preset_modes: + - Eco + - Sleep + - Turbo + has_oscillating: true + has_direction: true + speed_count: 3 diff --git a/tests/components/fan/test.esp8266-ard.yaml 
b/tests/components/fan/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/fan/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml diff --git a/tests/components/gpio/common.yaml b/tests/components/gpio/common.yaml index 4e237349d9..b8e8fa81e4 100644 --- a/tests/components/gpio/common.yaml +++ b/tests/components/gpio/common.yaml @@ -12,3 +12,20 @@ switch: - platform: gpio pin: ${switch_pin} id: gpio_switch + + - platform: gpio + pin: ${switch_pin_2} + id: gpio_switch_interlock_1 + interlock: [gpio_switch_interlock_2, gpio_switch_interlock_3] + interlock_wait_time: 100ms + + - platform: gpio + pin: ${switch_pin_3} + id: gpio_switch_interlock_2 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_3] + + - platform: gpio + pin: ${switch_pin_4} + id: gpio_switch_interlock_3 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_2] + interlock_wait_time: 50ms diff --git a/tests/components/gpio/test.esp32-c3-idf.yaml b/tests/components/gpio/test.esp32-c3-idf.yaml index fc7c9942d0..e9071b4356 100644 --- a/tests/components/gpio/test.esp32-c3-idf.yaml +++ b/tests/components/gpio/test.esp32-c3-idf.yaml @@ -2,5 +2,8 @@ substitutions: binary_sensor_pin: GPIO2 output_pin: GPIO3 switch_pin: GPIO4 + switch_pin_2: GPIO5 + switch_pin_3: GPIO6 + switch_pin_4: GPIO7 <<: !include common.yaml diff --git a/tests/components/gpio/test.esp32-idf.yaml b/tests/components/gpio/test.esp32-idf.yaml index 09f41abb79..862aa533ea 100644 --- a/tests/components/gpio/test.esp32-idf.yaml +++ b/tests/components/gpio/test.esp32-idf.yaml @@ -2,5 +2,8 @@ substitutions: binary_sensor_pin: GPIO12 output_pin: GPIO13 switch_pin: GPIO14 + switch_pin_2: GPIO15 + switch_pin_3: GPIO16 + switch_pin_4: GPIO17 <<: !include common.yaml diff --git a/tests/components/gpio/test.esp8266-ard.yaml b/tests/components/gpio/test.esp8266-ard.yaml index e1660ec47c..e13b4520d1 100644 --- a/tests/components/gpio/test.esp8266-ard.yaml +++ b/tests/components/gpio/test.esp8266-ard.yaml @@ -2,5 +2,8 @@ substitutions: binary_sensor_pin: GPIO0 output_pin: GPIO2 switch_pin: GPIO15 + switch_pin_2: GPIO12 + switch_pin_3: GPIO13 + switch_pin_4: GPIO14 <<: !include common.yaml diff --git a/tests/components/gpio/test.nrf52-adafruit.yaml b/tests/components/gpio/test.nrf52-adafruit.yaml index 912b9537c4..fb3f368e03 100644 --- a/tests/components/gpio/test.nrf52-adafruit.yaml +++ b/tests/components/gpio/test.nrf52-adafruit.yaml @@ -12,3 +12,20 @@ switch: - platform: gpio pin: P1.2 id: gpio_switch + + - platform: gpio + pin: P1.3 + id: gpio_switch_interlock_1 + interlock: [gpio_switch_interlock_2, gpio_switch_interlock_3] + interlock_wait_time: 100ms + + - platform: gpio + pin: P1.4 + id: gpio_switch_interlock_2 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_3] + + - platform: gpio + pin: P1.5 + id: gpio_switch_interlock_3 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_2] + interlock_wait_time: 50ms diff --git a/tests/components/gpio/test.nrf52-mcumgr.yaml b/tests/components/gpio/test.nrf52-mcumgr.yaml index 912b9537c4..fb3f368e03 100644 --- a/tests/components/gpio/test.nrf52-mcumgr.yaml +++ b/tests/components/gpio/test.nrf52-mcumgr.yaml @@ -12,3 +12,20 @@ switch: - platform: gpio pin: P1.2 id: gpio_switch + + - platform: gpio + pin: P1.3 + id: gpio_switch_interlock_1 + interlock: [gpio_switch_interlock_2, gpio_switch_interlock_3] + interlock_wait_time: 100ms + + - platform: gpio + pin: P1.4 + id: gpio_switch_interlock_2 + interlock: [gpio_switch_interlock_1, 
gpio_switch_interlock_3] + + - platform: gpio + pin: P1.5 + id: gpio_switch_interlock_3 + interlock: [gpio_switch_interlock_1, gpio_switch_interlock_2] + interlock_wait_time: 50ms diff --git a/tests/components/gpio/test.rp2040-ard.yaml b/tests/components/gpio/test.rp2040-ard.yaml index fc7c9942d0..e9071b4356 100644 --- a/tests/components/gpio/test.rp2040-ard.yaml +++ b/tests/components/gpio/test.rp2040-ard.yaml @@ -2,5 +2,8 @@ substitutions: binary_sensor_pin: GPIO2 output_pin: GPIO3 switch_pin: GPIO4 + switch_pin_2: GPIO5 + switch_pin_3: GPIO6 + switch_pin_4: GPIO7 <<: !include common.yaml diff --git a/tests/components/json/common.yaml b/tests/components/json/common.yaml new file mode 100644 index 0000000000..f4074e1172 --- /dev/null +++ b/tests/components/json/common.yaml @@ -0,0 +1,33 @@ +json: + +interval: + - interval: 60s + then: + - lambda: |- + // Test build_json + std::string json_str = esphome::json::build_json([](JsonObject root) { + root["sensor"] = "temperature"; + root["value"] = 23.5; + root["unit"] = "°C"; + }); + ESP_LOGD("test", "Built JSON: %s", json_str.c_str()); + + // Test parse_json + bool parse_ok = esphome::json::parse_json(json_str, [](JsonObject root) { + if (root.containsKey("sensor") && root.containsKey("value")) { + const char* sensor = root["sensor"]; + float value = root["value"]; + ESP_LOGD("test", "Parsed: sensor=%s, value=%.1f", sensor, value); + } else { + ESP_LOGD("test", "Parsed JSON missing required keys"); + } + }); + ESP_LOGD("test", "Parse result (JSON syntax only): %s", parse_ok ? "success" : "failed"); + + // Test JsonBuilder class + esphome::json::JsonBuilder builder; + JsonObject obj = builder.root(); + obj["test"] = "direct_builder"; + obj["count"] = 42; + std::string result = builder.serialize(); + ESP_LOGD("test", "JsonBuilder result: %s", result.c_str()); diff --git a/tests/components/json/test.esp32-idf.yaml b/tests/components/json/test.esp32-idf.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/json/test.esp32-idf.yaml @@ -0,0 +1 @@ +<<: !include common.yaml diff --git a/tests/components/json/test.esp8266-ard.yaml b/tests/components/json/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/json/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml diff --git a/tests/components/light/common.yaml b/tests/components/light/common.yaml index d4f64dcdea..247fc19aba 100644 --- a/tests/components/light/common.yaml +++ b/tests/components/light/common.yaml @@ -17,6 +17,20 @@ esphome: relative_brightness: 5% brightness_limits: max_brightness: 90% + - light.turn_on: + id: test_addressable_transition + brightness: 50% + red: 100% + green: 0% + blue: 0% + transition_length: 500ms + - light.turn_on: + id: test_addressable_transition + brightness: 100% + red: 0% + green: 100% + blue: 0% + transition_length: 1s light: - platform: binary @@ -123,3 +137,49 @@ light: red: 100% green: 50% blue: 50% + # Test StrobeLightEffect with multiple colors + - platform: monochromatic + id: test_strobe_multiple + name: Strobe Multiple Colors + output: test_ledc_1 + effects: + - strobe: + name: Strobe Multi + colors: + - state: true + brightness: 100% + duration: 500ms + - state: false + duration: 250ms + - state: true + brightness: 50% + duration: 500ms + # Test StrobeLightEffect with transition + - platform: rgb + id: test_strobe_transition + name: Strobe With Transition + red: test_ledc_1 + green: test_ledc_2 + blue: test_ledc_3 + effects: + - strobe: + name: 
Strobe Transition + colors: + - state: true + red: 100% + green: 0% + blue: 0% + duration: 1s + transition_length: 500ms + - state: true + red: 0% + green: 100% + blue: 0% + duration: 1s + transition_length: 500ms + - platform: partition + id: test_addressable_transition + name: Addressable Transition Test + default_transition_length: 1s + segments: + - single_light_id: test_rgb_light diff --git a/tests/components/main.cpp b/tests/components/main.cpp new file mode 100644 index 0000000000..928f0e6059 --- /dev/null +++ b/tests/components/main.cpp @@ -0,0 +1,26 @@ +#include + +/* +This special main.cpp replaces the default one. +It will run all the Google Tests found in all compiled cpp files and then exit with the result +See README.md for more information +*/ + +// Auto generated code by esphome +// ========== AUTO GENERATED INCLUDE BLOCK BEGIN =========== +// ========== AUTO GENERATED INCLUDE BLOCK END ===========" + +void original_setup() { + // This function won't be run. + + // ========== AUTO GENERATED CODE BEGIN =========== + // =========== AUTO GENERATED CODE END ============ +} + +void setup() { + ::testing::InitGoogleTest(); + int exit_code = RUN_ALL_TESTS(); + exit(exit_code); +} + +void loop() {} diff --git a/tests/components/sensor/common.yaml b/tests/components/sensor/common.yaml new file mode 100644 index 0000000000..2180f66da8 --- /dev/null +++ b/tests/components/sensor/common.yaml @@ -0,0 +1,238 @@ +sensor: + # Source sensor for testing filters + - platform: template + name: "Source Sensor" + id: source_sensor + lambda: return 42.0; + update_interval: 1s + + # Streaming filters (window_size == send_every) - uses StreamingFilter base class + - platform: copy + source_id: source_sensor + name: "Streaming Min Filter" + filters: + - min: + window_size: 10 + send_every: 10 # Batch window → StreamingMinFilter + + - platform: copy + source_id: source_sensor + name: "Streaming Max Filter" + filters: + - max: + window_size: 10 + send_every: 10 # Batch window → StreamingMaxFilter + + - platform: copy + source_id: source_sensor + name: "Streaming Moving Average Filter" + filters: + - sliding_window_moving_average: + window_size: 10 + send_every: 10 # Batch window → StreamingMovingAverageFilter + + # Sliding window filters (window_size != send_every) - uses SlidingWindowFilter base class with ring buffer + - platform: copy + source_id: source_sensor + name: "Sliding Min Filter" + filters: + - min: + window_size: 10 + send_every: 5 # Sliding window → MinFilter with ring buffer + + - platform: copy + source_id: source_sensor + name: "Sliding Max Filter" + filters: + - max: + window_size: 10 + send_every: 5 # Sliding window → MaxFilter with ring buffer + + - platform: copy + source_id: source_sensor + name: "Sliding Median Filter" + filters: + - median: + window_size: 10 + send_every: 5 # Sliding window → MedianFilter with ring buffer + + - platform: copy + source_id: source_sensor + name: "Sliding Quantile Filter" + filters: + - quantile: + window_size: 10 + send_every: 5 + quantile: 0.9 # Sliding window → QuantileFilter with ring buffer + + - platform: copy + source_id: source_sensor + name: "Sliding Moving Average Filter" + filters: + - sliding_window_moving_average: + window_size: 10 + send_every: 5 # Sliding window → SlidingWindowMovingAverageFilter with ring buffer + + # Edge cases + - platform: copy + source_id: source_sensor + name: "Large Batch Window Min" + filters: + - min: + window_size: 1000 + send_every: 1000 # Large batch → StreamingMinFilter (4 bytes, not 4KB) + + - 
platform: copy + source_id: source_sensor + name: "Small Sliding Window" + filters: + - median: + window_size: 3 + send_every: 1 # Frequent output → MedianFilter with 3-element ring buffer + + # send_first_at parameter test + - platform: copy + source_id: source_sensor + name: "Early Send Filter" + filters: + - max: + window_size: 10 + send_every: 10 + send_first_at: 1 # Send after first value + + # ValueListFilter-based filters tests + # FilterOutValueFilter - single value + - platform: copy + source_id: source_sensor + name: "Filter Out Single Value" + filters: + - filter_out: 42.0 # Should filter out exactly 42.0 + + # FilterOutValueFilter - multiple values + - platform: copy + source_id: source_sensor + name: "Filter Out Multiple Values" + filters: + - filter_out: [0.0, 42.0, 100.0] # List of values to filter + + # FilterOutValueFilter - with NaN + - platform: copy + source_id: source_sensor + name: "Filter Out NaN" + filters: + - filter_out: nan # Filter out NaN values + + # FilterOutValueFilter - mixed values with NaN + - platform: copy + source_id: source_sensor + name: "Filter Out Mixed with NaN" + filters: + - filter_out: [nan, 0.0, 42.0] + + # ThrottleWithPriorityFilter - single priority value + - platform: copy + source_id: source_sensor + name: "Throttle with Single Priority" + filters: + - throttle_with_priority: + timeout: 1000ms + value: 42.0 # Priority value bypasses throttle + + # ThrottleWithPriorityFilter - multiple priority values + - platform: copy + source_id: source_sensor + name: "Throttle with Multiple Priorities" + filters: + - throttle_with_priority: + timeout: 500ms + value: [0.0, 42.0, 100.0] # Multiple priority values + + # ThrottleWithPriorityFilter - with NaN priority + - platform: copy + source_id: source_sensor + name: "Throttle with NaN Priority" + filters: + - throttle_with_priority: + timeout: 1000ms + value: nan # NaN as priority value + + # Combined filters - FilterOutValueFilter + other filters + - platform: copy + source_id: source_sensor + name: "Filter Out Then Throttle" + filters: + - filter_out: [0.0, 100.0] + - throttle: 500ms + + # Combined filters - ThrottleWithPriorityFilter + other filters + - platform: copy + source_id: source_sensor + name: "Throttle Priority Then Scale" + filters: + - throttle_with_priority: + timeout: 1000ms + value: [42.0] + - multiply: 2.0 + + # CalibrateLinearFilter - piecewise linear calibration + - platform: copy + source_id: source_sensor + name: "Calibrate Linear Two Points" + filters: + - calibrate_linear: + - 0.0 -> 0.0 + - 100.0 -> 100.0 + + - platform: copy + source_id: source_sensor + name: "Calibrate Linear Multiple Segments" + filters: + - calibrate_linear: + - 0.0 -> 0.0 + - 50.0 -> 55.0 + - 100.0 -> 102.5 + + - platform: copy + source_id: source_sensor + name: "Calibrate Linear Least Squares" + filters: + - calibrate_linear: + method: least_squares + datapoints: + - 0.0 -> 0.0 + - 50.0 -> 55.0 + - 100.0 -> 102.5 + + # CalibratePolynomialFilter - polynomial calibration + - platform: copy + source_id: source_sensor + name: "Calibrate Polynomial Degree 2" + filters: + - calibrate_polynomial: + degree: 2 + datapoints: + - 0.0 -> 0.0 + - 50.0 -> 55.0 + - 100.0 -> 102.5 + + - platform: copy + source_id: source_sensor + name: "Calibrate Polynomial Degree 3" + filters: + - calibrate_polynomial: + degree: 3 + datapoints: + - 0.0 -> 0.0 + - 25.0 -> 26.0 + - 50.0 -> 55.0 + - 100.0 -> 102.5 + + # OrFilter - filter branching + - platform: copy + source_id: source_sensor + name: "Or Filter with Multiple Branches" + 
filters: + - or: + - multiply: 2.0 + - offset: 10.0 + - lambda: return x * 3.0; diff --git a/tests/components/sensor/test.esp8266-ard.yaml b/tests/components/sensor/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/sensor/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml diff --git a/tests/components/template/common-base.yaml b/tests/components/template/common-base.yaml index ea812532d4..b873af5207 100644 --- a/tests/components/template/common-base.yaml +++ b/tests/components/template/common-base.yaml @@ -101,6 +101,9 @@ sensor: - filter_out: 10 - filter_out: !lambda return NAN; - heartbeat: 5s + - heartbeat: + period: 5s + optimistic: true - lambda: return x * (9.0/5.0) + 32.0; - max: window_size: 10 diff --git a/tests/components/text_sensor/common.yaml b/tests/components/text_sensor/common.yaml new file mode 100644 index 0000000000..4459c0fa44 --- /dev/null +++ b/tests/components/text_sensor/common.yaml @@ -0,0 +1,66 @@ +text_sensor: + - platform: template + name: "Test Substitute Single" + id: test_substitute_single + filters: + - substitute: + - ERROR -> Error + + - platform: template + name: "Test Substitute Multiple" + id: test_substitute_multiple + filters: + - substitute: + - ERROR -> Error + - WARN -> Warning + - INFO -> Information + - DEBUG -> Debug + + - platform: template + name: "Test Substitute Chained" + id: test_substitute_chained + filters: + - substitute: + - foo -> bar + - to_upper + - substitute: + - BAR -> baz + + - platform: template + name: "Test Map Single" + id: test_map_single + filters: + - map: + - ON -> Active + + - platform: template + name: "Test Map Multiple" + id: test_map_multiple + filters: + - map: + - ON -> Active + - OFF -> Inactive + - UNKNOWN -> Error + - IDLE -> Standby + + - platform: template + name: "Test Map Passthrough" + id: test_map_passthrough + filters: + - map: + - Good -> Excellent + - Bad -> Poor + + - platform: template + name: "Test All Filters" + id: test_all_filters + filters: + - to_upper + - to_lower + - append: " suffix" + - prepend: "prefix " + - substitute: + - prefix -> PREFIX + - suffix -> SUFFIX + - map: + - PREFIX text SUFFIX -> mapped diff --git a/tests/components/text_sensor/test.esp8266-ard.yaml b/tests/components/text_sensor/test.esp8266-ard.yaml new file mode 100644 index 0000000000..dade44d145 --- /dev/null +++ b/tests/components/text_sensor/test.esp8266-ard.yaml @@ -0,0 +1 @@ +<<: !include common.yaml diff --git a/tests/components/uart/common.h b/tests/components/uart/common.h new file mode 100644 index 0000000000..5597b86410 --- /dev/null +++ b/tests/components/uart/common.h @@ -0,0 +1,37 @@ +#pragma once +#include +#include +#include +#include +#include +#include "esphome/components/uart/uart_component.h" + +namespace esphome::uart::testing { + +using ::testing::_; +using ::testing::Return; +using ::testing::SaveArg; +using ::testing::DoAll; +using ::testing::Invoke; +using ::testing::SetArgPointee; + +// Derive a mock from UARTComponent to test the wrapper implementations. +class MockUARTComponent : public UARTComponent { + public: + using UARTComponent::write_array; + using UARTComponent::write_byte; + + // NOTE: std::vector is used here for test convenience. For production code, + // consider using StaticVector or FixedVector from esphome/core/helpers.h instead. 
+ std::vector written_data; + + void write_array(const uint8_t *data, size_t len) override { written_data.assign(data, data + len); } + + MOCK_METHOD(bool, read_array, (uint8_t * data, size_t len), (override)); + MOCK_METHOD(bool, peek_byte, (uint8_t * data), (override)); + MOCK_METHOD(int, available, (), (override)); + MOCK_METHOD(void, flush, (), (override)); + MOCK_METHOD(void, check_logger_conflict, (), (override)); +}; + +} // namespace esphome::uart::testing diff --git a/tests/components/uart/uart_component.cpp b/tests/components/uart/uart_component.cpp new file mode 100644 index 0000000000..2cab1f62ad --- /dev/null +++ b/tests/components/uart/uart_component.cpp @@ -0,0 +1,73 @@ +#include "common.h" + +namespace esphome::uart::testing { + +TEST(UARTComponentTest, SetGetBaudRate) { + MockUARTComponent mock; + mock.set_baud_rate(38400); + EXPECT_EQ(mock.get_baud_rate(), 38400); +} + +TEST(UARTComponentTest, SetGetStopBits) { + MockUARTComponent mock; + mock.set_stop_bits(2); + EXPECT_EQ(mock.get_stop_bits(), 2); +} + +TEST(UARTComponentTest, SetGetDataBits) { + MockUARTComponent mock; + mock.set_data_bits(7); + EXPECT_EQ(mock.get_data_bits(), 7); +} + +TEST(UARTComponentTest, SetGetParity) { + MockUARTComponent mock; + mock.set_parity(UARTParityOptions::UART_CONFIG_PARITY_EVEN); + EXPECT_EQ(mock.get_parity(), UARTParityOptions::UART_CONFIG_PARITY_EVEN); +} + +TEST(UARTComponentTest, SetGetRxBufferSize) { + MockUARTComponent mock; + mock.set_rx_buffer_size(128); + EXPECT_EQ(mock.get_rx_buffer_size(), 128); +} + +TEST(UARTComponentTest, WriteArrayVector) { + MockUARTComponent mock; + std::vector data = {10, 20, 30}; + mock.write_array(data); + EXPECT_EQ(mock.written_data, data); +} +TEST(UARTComponentTest, WriteByte) { + MockUARTComponent mock; + uint8_t byte = 0x79; + mock.write_byte(byte); + EXPECT_EQ(mock.written_data.size(), 1); + EXPECT_EQ(mock.written_data[0], byte); +} + +TEST(UARTComponentTest, WriteStr) { + MockUARTComponent mock; + const char *str = "Hello"; + std::vector captured; + mock.write_str(str); + EXPECT_EQ(mock.written_data.size(), strlen(str)); + EXPECT_EQ(0, strncmp(str, (const char *) mock.written_data.data(), mock.written_data.size())); +} + +// Tests for wrapper methods forwarding to pure virtual read_array +TEST(UARTComponentTest, ReadByteSuccess) { + MockUARTComponent mock; + uint8_t value = 0; + EXPECT_CALL(mock, read_array(&value, 1)).WillOnce(Return(true)); + EXPECT_TRUE(mock.read_byte(&value)); +} + +TEST(UARTComponentTest, ReadByteFailure) { + MockUARTComponent mock; + uint8_t value = 0xFF; + EXPECT_CALL(mock, read_array(&value, 1)).WillOnce(Return(false)); + EXPECT_FALSE(mock.read_byte(&value)); +} + +} // namespace esphome::uart::testing diff --git a/tests/components/uart/uart_device.cpp b/tests/components/uart/uart_device.cpp new file mode 100644 index 0000000000..c3f1d9078b --- /dev/null +++ b/tests/components/uart/uart_device.cpp @@ -0,0 +1,108 @@ +#include "common.h" +#include "esphome/components/uart/uart.h" + +namespace esphome::uart::testing { + +TEST(UARTDeviceTest, ReadByteSuccess) { + MockUARTComponent mock; + UARTDevice dev(&mock); + uint8_t value = 0; + EXPECT_CALL(mock, read_array(_, 1)).WillOnce(DoAll(SetArgPointee<0>(0x5A), Return(true))); + bool result = dev.read_byte(&value); + EXPECT_TRUE(result); + EXPECT_EQ(value, 0x5A); +} + +TEST(UARTDeviceTest, ReadByteFailure) { + MockUARTComponent mock; + UARTDevice dev(&mock); + uint8_t value = 0xFF; + EXPECT_CALL(mock, read_array(_, 1)).WillOnce(Return(false)); + bool result = 
+  EXPECT_FALSE(result);
+}
+
+TEST(UARTDeviceTest, PeekByteSuccess) {
+  MockUARTComponent mock;
+  UARTDevice dev(&mock);
+  uint8_t value = 0;
+  EXPECT_CALL(mock, peek_byte(_)).WillOnce(DoAll(SetArgPointee<0>(0xA5), Return(true)));
+  bool result = dev.peek_byte(&value);
+  EXPECT_TRUE(result);
+  EXPECT_EQ(value, 0xA5);
+}
+
+TEST(UARTDeviceTest, PeekByteFailure) {
+  MockUARTComponent mock;
+  UARTDevice dev(&mock);
+  uint8_t value = 0;
+  EXPECT_CALL(mock, peek_byte(_)).WillOnce(Return(false));
+  bool result = dev.peek_byte(&value);
+  EXPECT_FALSE(result);
+}
+
+TEST(UARTDeviceTest, Available) {
+  MockUARTComponent mock;
+  UARTDevice dev(&mock);
+  EXPECT_CALL(mock, available()).WillOnce(Return(5));
+  EXPECT_EQ(dev.available(), 5);
+}
+
+TEST(UARTDeviceTest, FlushCallsParent) {
+  MockUARTComponent mock;
+  UARTDevice dev(&mock);
+  EXPECT_CALL(mock, flush()).Times(1);
+  dev.flush();
+}
+
+TEST(UARTDeviceTest, WriteByteForwardsToWriteArray) {
+  MockUARTComponent mock;
+  UARTDevice dev(&mock);
+  dev.write_byte(0xAB);
+  EXPECT_EQ(mock.written_data.size(), 1);
+  EXPECT_EQ(mock.written_data[0], 0xAB);
+}
+TEST(UARTDeviceTest, WriteArrayPointer) {
+  MockUARTComponent mock;
+  UARTDevice dev(&mock);
+  uint8_t data[3] = {1, 2, 3};
+  dev.write_array(data, 3);
+  EXPECT_EQ(mock.written_data.size(), 3);
+  EXPECT_EQ(mock.written_data, std::vector<uint8_t>(data, data + 3));
+}
+
+TEST(UARTDeviceTest, WriteArrayVector) {
+  MockUARTComponent mock;
+  UARTDevice dev(&mock);
+  std::vector<uint8_t> data = {4, 5, 6};
+  dev.write_array(data);
+  EXPECT_EQ(mock.written_data, data);
+}
+
+TEST(UARTDeviceTest, WriteArrayStdArray) {
+  MockUARTComponent mock;
+  UARTDevice dev(&mock);
+  std::array<uint8_t, 4> data = {7, 8, 9, 10};
+  dev.write_array(data);
+  EXPECT_EQ(mock.written_data.size(), data.size());
+  EXPECT_EQ(mock.written_data, std::vector<uint8_t>(data.begin(), data.end()));
+}
+
+TEST(UARTDeviceTest, WriteStrForwardsToWriteArray) {
+  MockUARTComponent mock;
+  UARTDevice dev(&mock);
+  const char *str = "ESPHome";
+  dev.write_str(str);
+  EXPECT_EQ(mock.written_data.size(), strlen(str));
+  EXPECT_EQ(0, strncmp(str, (const char *) mock.written_data.data(), mock.written_data.size()));
+}
+
+TEST(UARTDeviceTest, WriteStrEmptyString) {
+  MockUARTComponent mock;
+  UARTDevice dev(&mock);
+  const char *str = "";
+  dev.write_str(str);
+  EXPECT_EQ(mock.written_data.size(), 0);
+}
+
+}  // namespace esphome::uart::testing
diff --git a/tests/components/uponor_smatrix/common.yaml b/tests/components/uponor_smatrix/common.yaml
index 786a604aec..7bb5e952ad 100644
--- a/tests/components/uponor_smatrix/common.yaml
+++ b/tests/components/uponor_smatrix/common.yaml
@@ -11,18 +11,17 @@ time:
     - 192.168.178.1

 uponor_smatrix:
-  address: 0x110B
   time_id: sntp_time
-  time_device_address: 0xDE13
+  time_device_address: 0x110BDE13

 climate:
   - platform: uponor_smatrix
-    address: 0xDE13
+    address: 0x110BDE13
     name: Thermostat Living Room

 sensor:
   - platform: uponor_smatrix
-    address: 0xDE13
+    address: 0x110BDE13
     humidity:
       name: Thermostat Humidity Living Room
     temperature:
diff --git a/tests/components/wifi/common.yaml b/tests/components/wifi/common.yaml
index 343d44b177..af27f85092 100644
--- a/tests/components/wifi/common.yaml
+++ b/tests/components/wifi/common.yaml
@@ -12,5 +12,8 @@ esphome:
       - logger.log: "Failed to connect to WiFi!"
wifi: - ssid: MySSID - password: password1 + networks: + - ssid: MySSID + password: password1 + - ssid: MySSID2 + password: password2 diff --git a/tests/integration/README.md b/tests/integration/README.md index 8fce81bb80..2a6b6fe564 100644 --- a/tests/integration/README.md +++ b/tests/integration/README.md @@ -7,6 +7,7 @@ This directory contains end-to-end integration tests for ESPHome, focusing on te - `conftest.py` - Common fixtures and utilities - `const.py` - Constants used throughout the integration tests - `types.py` - Type definitions for fixtures and functions +- `state_utils.py` - State handling utilities (e.g., `InitialStateHelper`, `build_key_to_entity_mapping`) - `fixtures/` - YAML configuration files for tests - `test_*.py` - Individual test files @@ -26,6 +27,32 @@ The `yaml_config` fixture automatically loads YAML configurations based on the t - `reserved_tcp_port` - Reserves a TCP port by holding the socket open until ESPHome needs it - `unused_tcp_port` - Provides the reserved port number for each test +### Helper Utilities + +#### InitialStateHelper (`state_utils.py`) + +The `InitialStateHelper` class solves a common problem in integration tests: when an API client connects, ESPHome automatically broadcasts the current state of all entities. This can interfere with tests that want to track only new state changes triggered by test actions. + +**What it does:** +- Tracks all entities (except stateless ones like buttons) +- Swallows the first state broadcast for each entity +- Forwards all subsequent state changes to your test callback +- Provides `wait_for_initial_states()` to synchronize before test actions + +**When to use it:** +- Any test that triggers entity state changes and needs to verify them +- Tests that would otherwise see duplicate or unexpected states +- Tests that need clean separation between initial state and test-triggered changes + +**Implementation details:** +- Uses `(device_id, key)` tuples to uniquely identify entities across devices +- Automatically excludes `ButtonInfo` entities (stateless) +- Provides debug logging to track state reception (use `--log-cli-level=DEBUG`) +- Safe for concurrent use with multiple entity types + +**Future work:** +Consider converting existing integration tests to use `InitialStateHelper` for more reliable state tracking and to eliminate race conditions related to initial state broadcasts. + ### Writing Tests The simplest way to write a test is to use the `run_compiled` and `api_client_connected` fixtures: @@ -125,6 +152,54 @@ async def test_my_sensor( ``` ##### State Subscription Pattern + +**Recommended: Using InitialStateHelper** + +When an API client connects, ESPHome automatically sends the current state of all entities. 
The `InitialStateHelper` (from `state_utils.py`) handles this by swallowing these initial states and only forwarding subsequent state changes to your test callback: + +```python +from .state_utils import InitialStateHelper + +# Track state changes with futures +loop = asyncio.get_running_loop() +states: dict[int, EntityState] = {} +state_future: asyncio.Future[EntityState] = loop.create_future() + +def on_state(state: EntityState) -> None: + """This callback only receives NEW state changes, not initial states.""" + states[state.key] = state + # Check for specific condition using isinstance + if isinstance(state, SensorState) and state.state == expected_value: + if not state_future.done(): + state_future.set_result(state) + +# Get entities and set up state synchronization +entities, services = await client.list_entities_services() +initial_state_helper = InitialStateHelper(entities) + +# Subscribe with the wrapper that filters initial states +client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + +# Wait for all initial states to be broadcast +try: + await initial_state_helper.wait_for_initial_states() +except TimeoutError: + pytest.fail("Timeout waiting for initial states") + +# Now perform your test actions - on_state will only receive new changes +# ... trigger state changes ... + +# Wait for expected state +try: + result = await asyncio.wait_for(state_future, timeout=5.0) +except asyncio.TimeoutError: + pytest.fail(f"Expected state not received. Got: {list(states.values())}") +``` + +**Legacy: Manual State Tracking** + +If you need to handle initial states manually (not recommended for new tests): + ```python # Track state changes with futures loop = asyncio.get_running_loop() diff --git a/tests/integration/fixtures/host_mode_climate_basic_state.yaml b/tests/integration/fixtures/host_mode_climate_basic_state.yaml new file mode 100644 index 0000000000..f79d684fc6 --- /dev/null +++ b/tests/integration/fixtures/host_mode_climate_basic_state.yaml @@ -0,0 +1,112 @@ +esphome: + name: host-climate-test +host: +api: +logger: + +climate: + - platform: thermostat + id: dual_mode_thermostat + name: Dual-mode Thermostat + sensor: host_thermostat_temperature_sensor + humidity_sensor: host_thermostat_humidity_sensor + humidity_hysteresis: 1.0 + min_cooling_off_time: 20s + min_cooling_run_time: 20s + max_cooling_run_time: 30s + supplemental_cooling_delta: 3.0 + min_heating_off_time: 20s + min_heating_run_time: 20s + max_heating_run_time: 30s + supplemental_heating_delta: 3.0 + min_fanning_off_time: 20s + min_fanning_run_time: 20s + min_idle_time: 10s + visual: + min_humidity: 20% + max_humidity: 70% + min_temperature: 15.0 + max_temperature: 32.0 + temperature_step: 0.1 + default_preset: home + preset: + - name: "away" + default_target_temperature_low: 18.0 + default_target_temperature_high: 24.0 + - name: "home" + default_target_temperature_low: 18.0 + default_target_temperature_high: 24.0 + auto_mode: + - logger.log: "AUTO mode set" + heat_cool_mode: + - logger.log: "HEAT_COOL mode set" + cool_action: + - switch.turn_on: air_cond + supplemental_cooling_action: + - switch.turn_on: air_cond_2 + heat_action: + - switch.turn_on: heater + supplemental_heating_action: + - switch.turn_on: heater_2 + dry_action: + - switch.turn_on: air_cond + fan_only_action: + - switch.turn_on: fan_only + idle_action: + - switch.turn_off: air_cond + - switch.turn_off: air_cond_2 + - switch.turn_off: heater + - switch.turn_off: heater_2 + - switch.turn_off: fan_only + humidity_control_humidify_action: + 
- switch.turn_on: humidifier + humidity_control_off_action: + - switch.turn_off: humidifier + +sensor: + - platform: template + id: host_thermostat_humidity_sensor + unit_of_measurement: °C + accuracy_decimals: 2 + state_class: measurement + force_update: true + lambda: return 42.0; + update_interval: 0.1s + - platform: template + id: host_thermostat_temperature_sensor + unit_of_measurement: °C + accuracy_decimals: 2 + state_class: measurement + force_update: true + lambda: return 22.0; + update_interval: 0.1s + +switch: + - platform: template + id: air_cond + name: Air Conditioner + optimistic: true + - platform: template + id: air_cond_2 + name: Air Conditioner 2 + optimistic: true + - platform: template + id: fan_only + name: Fan + optimistic: true + - platform: template + id: heater + name: Heater + optimistic: true + - platform: template + id: heater_2 + name: Heater 2 + optimistic: true + - platform: template + id: dehumidifier + name: Dehumidifier + optimistic: true + - platform: template + id: humidifier + name: Humidifier + optimistic: true diff --git a/tests/integration/fixtures/host_mode_climate_control.yaml b/tests/integration/fixtures/host_mode_climate_control.yaml new file mode 100644 index 0000000000..c60e0597a2 --- /dev/null +++ b/tests/integration/fixtures/host_mode_climate_control.yaml @@ -0,0 +1,108 @@ +esphome: + name: host-climate-test +host: +api: +logger: + +climate: + - platform: thermostat + id: dual_mode_thermostat + name: Dual-mode Thermostat + sensor: host_thermostat_temperature_sensor + humidity_sensor: host_thermostat_humidity_sensor + humidity_hysteresis: 1.0 + min_cooling_off_time: 20s + min_cooling_run_time: 20s + max_cooling_run_time: 30s + supplemental_cooling_delta: 3.0 + min_heating_off_time: 20s + min_heating_run_time: 20s + max_heating_run_time: 30s + supplemental_heating_delta: 3.0 + min_fanning_off_time: 20s + min_fanning_run_time: 20s + min_idle_time: 10s + visual: + min_humidity: 20% + max_humidity: 70% + min_temperature: 15.0 + max_temperature: 32.0 + temperature_step: 0.1 + default_preset: home + preset: + - name: "away" + default_target_temperature_low: 18.0 + default_target_temperature_high: 24.0 + - name: "home" + default_target_temperature_low: 18.0 + default_target_temperature_high: 24.0 + auto_mode: + - logger.log: "AUTO mode set" + heat_cool_mode: + - logger.log: "HEAT_COOL mode set" + cool_action: + - switch.turn_on: air_cond + supplemental_cooling_action: + - switch.turn_on: air_cond_2 + heat_action: + - switch.turn_on: heater + supplemental_heating_action: + - switch.turn_on: heater_2 + dry_action: + - switch.turn_on: air_cond + fan_only_action: + - switch.turn_on: fan_only + idle_action: + - switch.turn_off: air_cond + - switch.turn_off: air_cond_2 + - switch.turn_off: heater + - switch.turn_off: heater_2 + - switch.turn_off: fan_only + humidity_control_humidify_action: + - switch.turn_on: humidifier + humidity_control_off_action: + - switch.turn_off: humidifier + +sensor: + - platform: template + id: host_thermostat_humidity_sensor + unit_of_measurement: °C + accuracy_decimals: 2 + state_class: measurement + force_update: true + lambda: return 42.0; + update_interval: 0.1s + - platform: template + id: host_thermostat_temperature_sensor + unit_of_measurement: °C + accuracy_decimals: 2 + state_class: measurement + force_update: true + lambda: return 22.0; + update_interval: 0.1s + +switch: + - platform: template + id: air_cond + name: Air Conditioner + optimistic: true + - platform: template + id: air_cond_2 + name: Air Conditioner 2 + 
optimistic: true + - platform: template + id: fan_only + name: Fan + optimistic: true + - platform: template + id: heater + name: Heater + optimistic: true + - platform: template + id: heater_2 + name: Heater 2 + optimistic: true + - platform: template + id: humidifier + name: Humidifier + optimistic: true diff --git a/tests/integration/fixtures/host_mode_many_entities.yaml b/tests/integration/fixtures/host_mode_many_entities.yaml index 612186507c..acb03f235b 100644 --- a/tests/integration/fixtures/host_mode_many_entities.yaml +++ b/tests/integration/fixtures/host_mode_many_entities.yaml @@ -210,7 +210,15 @@ sensor: name: "Test Sensor 50" lambda: return 50.0; update_interval: 0.1s - # Temperature sensor for the thermostat + # Sensors for the thermostat + - platform: template + name: "Humidity Sensor" + id: humidity_sensor + lambda: return 35.0; + unit_of_measurement: "%" + device_class: humidity + state_class: measurement + update_interval: 5s - platform: template name: "Temperature Sensor" id: temp_sensor @@ -295,6 +303,11 @@ valve: - logger.log: "Valve stopping" output: + - platform: template + id: humidifier_output + type: binary + write_action: + - logger.log: "Humidifier output changed" - platform: template id: heater_output type: binary @@ -305,18 +318,31 @@ output: type: binary write_action: - logger.log: "Cooler output changed" + - platform: template + id: fan_output + type: binary + write_action: + - logger.log: "Fan output changed" climate: - platform: thermostat name: "Test Thermostat" sensor: temp_sensor + humidity_sensor: humidity_sensor default_preset: Home on_boot_restore_from: default_preset min_heating_off_time: 1s min_heating_run_time: 1s min_cooling_off_time: 1s min_cooling_run_time: 1s + min_fan_mode_switching_time: 1s min_idle_time: 1s + visual: + min_humidity: 20% + max_humidity: 70% + min_temperature: 15.0 + max_temperature: 32.0 + temperature_step: 0.1 heat_action: - output.turn_on: heater_output cool_action: @@ -324,6 +350,14 @@ climate: idle_action: - output.turn_off: heater_output - output.turn_off: cooler_output + humidity_control_humidify_action: + - output.turn_on: humidifier_output + humidity_control_off_action: + - output.turn_off: humidifier_output + fan_mode_auto_action: + - output.turn_off: fan_output + fan_mode_on_action: + - output.turn_on: fan_output preset: - name: Home default_target_temperature_low: 20 diff --git a/tests/integration/fixtures/noise_encryption_key_clear_protection.yaml b/tests/integration/fixtures/noise_encryption_key_clear_protection.yaml new file mode 100644 index 0000000000..3ce84cd373 --- /dev/null +++ b/tests/integration/fixtures/noise_encryption_key_clear_protection.yaml @@ -0,0 +1,10 @@ +esphome: + name: noise-key-test + +host: + +api: + encryption: + key: "zX9/JHxMKwpP0jUGsF0iESCm1wRvNgR6NkKVOhn7kSs=" + +logger: diff --git a/tests/integration/fixtures/sensor_filters_value_list.yaml b/tests/integration/fixtures/sensor_filters_value_list.yaml new file mode 100644 index 0000000000..2b796a5be1 --- /dev/null +++ b/tests/integration/fixtures/sensor_filters_value_list.yaml @@ -0,0 +1,332 @@ +esphome: + name: test-value-list-filters + +host: +api: + batch_delay: 0ms # Disable batching to receive all state updates +logger: + level: DEBUG + +# Template sensors - one for each test to avoid cross-test interference +sensor: + - platform: template + name: "Source Sensor 1" + id: source_sensor_1 + accuracy_decimals: 1 + + - platform: template + name: "Source Sensor 2" + id: source_sensor_2 + accuracy_decimals: 1 + + - platform: template + 
name: "Source Sensor 3" + id: source_sensor_3 + accuracy_decimals: 1 + + - platform: template + name: "Source Sensor 4" + id: source_sensor_4 + accuracy_decimals: 1 + + - platform: template + name: "Source Sensor 5" + id: source_sensor_5 + accuracy_decimals: 1 + + - platform: template + name: "Source Sensor 6" + id: source_sensor_6 + accuracy_decimals: 2 + + - platform: template + name: "Source Sensor 7" + id: source_sensor_7 + accuracy_decimals: 1 + + # FilterOutValueFilter - single value + - platform: copy + source_id: source_sensor_1 + name: "Filter Out Single" + id: filter_out_single + filters: + - filter_out: 42.0 + + # FilterOutValueFilter - multiple values + - platform: copy + source_id: source_sensor_2 + name: "Filter Out Multiple" + id: filter_out_multiple + filters: + - filter_out: [0.0, 42.0, 100.0] + + # FilterOutValueFilter - with NaN + - platform: copy + source_id: source_sensor_1 + name: "Filter Out NaN" + id: filter_out_nan + filters: + - filter_out: nan + + # ThrottleWithPriorityFilter - single priority value + - platform: copy + source_id: source_sensor_3 + name: "Throttle Priority Single" + id: throttle_priority_single + filters: + - throttle_with_priority: + timeout: 200ms + value: 42.0 + + # ThrottleWithPriorityFilter - multiple priority values + - platform: copy + source_id: source_sensor_4 + name: "Throttle Priority Multiple" + id: throttle_priority_multiple + filters: + - throttle_with_priority: + timeout: 200ms + value: [0.0, 42.0, 100.0] + + # Edge case: Filter Out NaN explicitly + - platform: copy + source_id: source_sensor_5 + name: "Filter Out NaN Test" + id: filter_out_nan_test + filters: + - filter_out: nan + + # Edge case: Accuracy decimals - 2 decimals + - platform: copy + source_id: source_sensor_6 + name: "Filter Out Accuracy 2" + id: filter_out_accuracy_2 + filters: + - filter_out: 42.0 + + # Edge case: Throttle with NaN priority + - platform: copy + source_id: source_sensor_7 + name: "Throttle Priority NaN" + id: throttle_priority_nan + filters: + - throttle_with_priority: + timeout: 200ms + value: nan + +# Script to test FilterOutValueFilter +script: + - id: test_filter_out_single + then: + # Should pass through: 1.0, 2.0, 3.0 + # Should filter out: 42.0 + - sensor.template.publish: + id: source_sensor_1 + state: 1.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor_1 + state: 42.0 # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_1 + state: 2.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor_1 + state: 42.0 # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_1 + state: 3.0 + + - id: test_filter_out_multiple + then: + # Should filter out: 0.0, 42.0, 100.0 + # Should pass through: 1.0, 2.0, 50.0 + - sensor.template.publish: + id: source_sensor_2 + state: 0.0 # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_2 + state: 1.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor_2 + state: 42.0 # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_2 + state: 2.0 + - delay: 20ms + - sensor.template.publish: + id: source_sensor_2 + state: 100.0 # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_2 + state: 50.0 + + - id: test_throttle_priority_single + then: + # 42.0 bypasses throttle, other values are throttled + - sensor.template.publish: + id: source_sensor_3 + state: 1.0 # First value - passes + - delay: 50ms + - sensor.template.publish: + id: source_sensor_3 + state: 2.0 # 
Throttled + - delay: 50ms + - sensor.template.publish: + id: source_sensor_3 + state: 42.0 # Priority - passes immediately + - delay: 50ms + - sensor.template.publish: + id: source_sensor_3 + state: 3.0 # Throttled + - delay: 250ms # Wait for throttle to expire + - sensor.template.publish: + id: source_sensor_3 + state: 4.0 # Passes after timeout + + - id: test_throttle_priority_multiple + then: + # 0.0, 42.0, 100.0 bypass throttle + - sensor.template.publish: + id: source_sensor_4 + state: 1.0 # First value - passes + - delay: 50ms + - sensor.template.publish: + id: source_sensor_4 + state: 2.0 # Throttled + - delay: 50ms + - sensor.template.publish: + id: source_sensor_4 + state: 0.0 # Priority - passes + - delay: 50ms + - sensor.template.publish: + id: source_sensor_4 + state: 3.0 # Throttled + - delay: 50ms + - sensor.template.publish: + id: source_sensor_4 + state: 42.0 # Priority - passes + - delay: 50ms + - sensor.template.publish: + id: source_sensor_4 + state: 4.0 # Throttled + - delay: 50ms + - sensor.template.publish: + id: source_sensor_4 + state: 100.0 # Priority - passes + + - id: test_filter_out_nan + then: + # NaN should be filtered out, regular values pass + - sensor.template.publish: + id: source_sensor_5 + state: 1.0 # Pass + - delay: 20ms + - sensor.template.publish: + id: source_sensor_5 + state: !lambda 'return NAN;' # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_5 + state: 2.0 # Pass + - delay: 20ms + - sensor.template.publish: + id: source_sensor_5 + state: !lambda 'return NAN;' # Filtered out + - delay: 20ms + - sensor.template.publish: + id: source_sensor_5 + state: 3.0 # Pass + + - id: test_filter_out_accuracy_2 + then: + # With 2 decimal places, 42.00 filtered, 42.01 and 42.15 pass + - sensor.template.publish: + id: source_sensor_6 + state: 42.0 # Filtered (rounds to 42.00) + - delay: 20ms + - sensor.template.publish: + id: source_sensor_6 + state: 42.01 # Pass (rounds to 42.01) + - delay: 20ms + - sensor.template.publish: + id: source_sensor_6 + state: 42.15 # Pass (rounds to 42.15) + - delay: 20ms + - sensor.template.publish: + id: source_sensor_6 + state: 42.0 # Filtered (rounds to 42.00) + + - id: test_throttle_priority_nan + then: + # NaN bypasses throttle, regular values throttled + - sensor.template.publish: + id: source_sensor_7 + state: 1.0 # First value - passes + - delay: 50ms + - sensor.template.publish: + id: source_sensor_7 + state: 2.0 # Throttled + - delay: 50ms + - sensor.template.publish: + id: source_sensor_7 + state: !lambda 'return NAN;' # Priority NaN - passes + - delay: 50ms + - sensor.template.publish: + id: source_sensor_7 + state: 3.0 # Throttled + - delay: 50ms + - sensor.template.publish: + id: source_sensor_7 + state: !lambda 'return NAN;' # Priority NaN - passes + +# Buttons to trigger each test +button: + - platform: template + name: "Test Filter Out Single" + id: btn_filter_out_single + on_press: + - script.execute: test_filter_out_single + + - platform: template + name: "Test Filter Out Multiple" + id: btn_filter_out_multiple + on_press: + - script.execute: test_filter_out_multiple + + - platform: template + name: "Test Throttle Priority Single" + id: btn_throttle_priority_single + on_press: + - script.execute: test_throttle_priority_single + + - platform: template + name: "Test Throttle Priority Multiple" + id: btn_throttle_priority_multiple + on_press: + - script.execute: test_throttle_priority_multiple + + - platform: template + name: "Test Filter Out NaN" + id: btn_filter_out_nan + on_press: + 
- script.execute: test_filter_out_nan + + - platform: template + name: "Test Filter Out Accuracy 2" + id: btn_filter_out_accuracy_2 + on_press: + - script.execute: test_filter_out_accuracy_2 + + - platform: template + name: "Test Throttle Priority NaN" + id: btn_throttle_priority_nan + on_press: + - script.execute: test_throttle_priority_nan diff --git a/tests/integration/sensor_test_utils.py b/tests/integration/sensor_test_utils.py deleted file mode 100644 index c3843a26ab..0000000000 --- a/tests/integration/sensor_test_utils.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Shared utilities for sensor integration tests.""" - -from __future__ import annotations - -from aioesphomeapi import EntityInfo - - -def build_key_to_sensor_mapping( - entities: list[EntityInfo], sensor_names: list[str] -) -> dict[int, str]: - """Build a mapping from entity keys to sensor names. - - Args: - entities: List of entity info objects from the API - sensor_names: List of sensor names to search for in object_ids - - Returns: - Dictionary mapping entity keys to sensor names - """ - key_to_sensor: dict[int, str] = {} - for entity in entities: - obj_id = entity.object_id.lower() - for sensor_name in sensor_names: - if sensor_name in obj_id: - key_to_sensor[entity.key] = sensor_name - break - return key_to_sensor diff --git a/tests/integration/state_utils.py b/tests/integration/state_utils.py new file mode 100644 index 0000000000..58d6d2790f --- /dev/null +++ b/tests/integration/state_utils.py @@ -0,0 +1,167 @@ +"""Shared utilities for ESPHome integration tests - state handling.""" + +from __future__ import annotations + +import asyncio +import logging + +from aioesphomeapi import ButtonInfo, EntityInfo, EntityState + +_LOGGER = logging.getLogger(__name__) + + +def build_key_to_entity_mapping( + entities: list[EntityInfo], entity_names: list[str] +) -> dict[int, str]: + """Build a mapping from entity keys to entity names. + + Args: + entities: List of entity info objects from the API + entity_names: List of entity names to search for in object_ids + + Returns: + Dictionary mapping entity keys to entity names + """ + key_to_entity: dict[int, str] = {} + for entity in entities: + obj_id = entity.object_id.lower() + for entity_name in entity_names: + if entity_name in obj_id: + key_to_entity[entity.key] = entity_name + break + return key_to_entity + + +class InitialStateHelper: + """Helper to wait for initial states before processing test states. + + When an API client connects, ESPHome sends the current state of all entities. + This helper wraps the user's state callback and swallows the first state for + each entity, then forwards all subsequent states to the user callback. + + Usage: + entities, services = await client.list_entities_services() + helper = InitialStateHelper(entities) + client.subscribe_states(helper.on_state_wrapper(user_callback)) + await helper.wait_for_initial_states() + """ + + def __init__(self, entities: list[EntityInfo]) -> None: + """Initialize the helper. 
+ + Args: + entities: All entities from list_entities_services() + """ + # Set of (device_id, key) tuples waiting for initial state + # Buttons are stateless, so exclude them + self._wait_initial_states = { + (entity.device_id, entity.key) + for entity in entities + if not isinstance(entity, ButtonInfo) + } + # Keep entity info for debugging - use (device_id, key) tuple + self._entities_by_id = { + (entity.device_id, entity.key): entity for entity in entities + } + + # Log all entities + _LOGGER.debug( + "InitialStateHelper: Found %d total entities: %s", + len(entities), + [(type(e).__name__, e.object_id) for e in entities], + ) + + # Log which ones we're waiting for + _LOGGER.debug( + "InitialStateHelper: Waiting for %d entities (excluding ButtonInfo): %s", + len(self._wait_initial_states), + [self._entities_by_id[k].object_id for k in self._wait_initial_states], + ) + + # Log which ones we're NOT waiting for + not_waiting = { + (e.device_id, e.key) for e in entities + } - self._wait_initial_states + if not_waiting: + not_waiting_info = [ + f"{type(self._entities_by_id[k]).__name__}:{self._entities_by_id[k].object_id}" + for k in not_waiting + ] + _LOGGER.debug( + "InitialStateHelper: NOT waiting for %d entities: %s", + len(not_waiting), + not_waiting_info, + ) + + # Create future in the running event loop + self._initial_states_received = asyncio.get_running_loop().create_future() + # If no entities to wait for, mark complete immediately + if not self._wait_initial_states: + self._initial_states_received.set_result(True) + + def on_state_wrapper(self, user_callback): + """Wrap a user callback to track initial states. + + Args: + user_callback: The user's state callback function + + Returns: + Wrapped callback that swallows first state per entity, forwards rest + """ + + def wrapper(state: EntityState) -> None: + """Swallow initial state per entity, forward subsequent states.""" + # Create entity identifier tuple + entity_id = (state.device_id, state.key) + + # Log which entity is sending state + if entity_id in self._entities_by_id: + entity = self._entities_by_id[entity_id] + _LOGGER.debug( + "Received state for %s (type: %s, device_id: %s, key: %d)", + entity.object_id, + type(entity).__name__, + state.device_id, + state.key, + ) + + # If this entity is waiting for initial state + if entity_id in self._wait_initial_states: + # Remove from waiting set + self._wait_initial_states.discard(entity_id) + + _LOGGER.debug( + "Swallowed initial state for %s, %d entities remaining", + self._entities_by_id[entity_id].object_id + if entity_id in self._entities_by_id + else entity_id, + len(self._wait_initial_states), + ) + + # Check if we've now seen all entities + if ( + not self._wait_initial_states + and not self._initial_states_received.done() + ): + _LOGGER.debug("All initial states received") + self._initial_states_received.set_result(True) + + # Don't forward initial state to user + return + + # Forward subsequent states to user callback + _LOGGER.debug("Forwarding state to user callback") + user_callback(state) + + return wrapper + + async def wait_for_initial_states(self, timeout: float = 5.0) -> None: + """Wait for all initial states to be received. 
+ + Args: + timeout: Maximum time to wait in seconds + + Raises: + asyncio.TimeoutError: If initial states aren't received within timeout + """ + await asyncio.wait_for(self._initial_states_received, timeout=timeout) diff --git a/tests/integration/test_host_mode_climate_basic_state.py b/tests/integration/test_host_mode_climate_basic_state.py new file mode 100644 index 0000000000..4697342a99 --- /dev/null +++ b/tests/integration/test_host_mode_climate_basic_state.py @@ -0,0 +1,49 @@ +"""Integration test for Host mode with climate.""" + +from __future__ import annotations + +import asyncio + +import aioesphomeapi +from aioesphomeapi import ClimateAction, ClimateMode, ClimatePreset, EntityState +import pytest + +from .types import APIClientConnectedFactory, RunCompiledFunction + + +@pytest.mark.asyncio +async def test_host_mode_climate_basic_state( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test basic climate state reporting.""" + loop = asyncio.get_running_loop() + async with run_compiled(yaml_config), api_client_connected() as client: + states: dict[int, EntityState] = {} + climate_future: asyncio.Future[EntityState] = loop.create_future() + + def on_state(state: EntityState) -> None: + states[state.key] = state + if ( + isinstance(state, aioesphomeapi.ClimateState) + and not climate_future.done() + ): + climate_future.set_result(state) + + client.subscribe_states(on_state) + + try: + climate_state = await asyncio.wait_for(climate_future, timeout=5.0) + except TimeoutError: + pytest.fail("Climate state not received within 5 seconds") + + assert isinstance(climate_state, aioesphomeapi.ClimateState) + assert climate_state.mode == ClimateMode.OFF + assert climate_state.action == ClimateAction.OFF + assert climate_state.current_temperature == 22.0 + assert climate_state.target_temperature_low == 18.0 + assert climate_state.target_temperature_high == 24.0 + assert climate_state.preset == ClimatePreset.HOME + assert climate_state.current_humidity == 42.0 + assert climate_state.target_humidity == 20.0 diff --git a/tests/integration/test_host_mode_climate_control.py b/tests/integration/test_host_mode_climate_control.py new file mode 100644 index 0000000000..96d15dfae0 --- /dev/null +++ b/tests/integration/test_host_mode_climate_control.py @@ -0,0 +1,76 @@ +"""Integration test for Host mode with climate.""" + +from __future__ import annotations + +import asyncio + +import aioesphomeapi +from aioesphomeapi import ClimateInfo, ClimateMode, EntityState +import pytest + +from .state_utils import InitialStateHelper +from .types import APIClientConnectedFactory, RunCompiledFunction + + +@pytest.mark.asyncio +async def test_host_mode_climate_control( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test climate mode control.""" + loop = asyncio.get_running_loop() + async with run_compiled(yaml_config), api_client_connected() as client: + states: dict[int, EntityState] = {} + climate_future: asyncio.Future[EntityState] = loop.create_future() + + def on_state(state: EntityState) -> None: + states[state.key] = state + if ( + isinstance(state, aioesphomeapi.ClimateState) + and state.mode == ClimateMode.HEAT + and state.target_temperature_low == 21.5 + and state.target_temperature_high == 26.5 + and not climate_future.done() + ): + climate_future.set_result(state) + + # Get entities and set up state synchronization + entities, services = await 
client.list_entities_services() + initial_state_helper = InitialStateHelper(entities) + climate_infos = [e for e in entities if isinstance(e, ClimateInfo)] + assert len(climate_infos) >= 1, "Expected at least 1 climate entity" + + # Subscribe with the wrapper that filters initial states + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for all initial states to be broadcast + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial states") + + test_climate = next( + (c for c in climate_infos if c.name == "Dual-mode Thermostat"), None + ) + assert test_climate is not None, ( + "Dual-mode Thermostat thermostat climate not found" + ) + + # Adjust setpoints + client.climate_command( + test_climate.key, + mode=ClimateMode.HEAT, + target_temperature_low=21.5, + target_temperature_high=26.5, + ) + + try: + climate_state = await asyncio.wait_for(climate_future, timeout=5.0) + except TimeoutError: + pytest.fail("Climate state not received within 5 seconds") + + assert isinstance(climate_state, aioesphomeapi.ClimateState) + assert climate_state.mode == ClimateMode.HEAT + assert climate_state.target_temperature_low == 21.5 + assert climate_state.target_temperature_high == 26.5 diff --git a/tests/integration/test_host_mode_many_entities.py b/tests/integration/test_host_mode_many_entities.py index fbe3dc25c8..299644d496 100644 --- a/tests/integration/test_host_mode_many_entities.py +++ b/tests/integration/test_host_mode_many_entities.py @@ -5,7 +5,10 @@ from __future__ import annotations import asyncio from aioesphomeapi import ( + ClimateFanMode, + ClimateFeature, ClimateInfo, + ClimateMode, DateInfo, DateState, DateTimeInfo, @@ -121,6 +124,46 @@ async def test_host_mode_many_entities( assert len(climate_infos) >= 1, "Expected at least 1 climate entity" climate_info = climate_infos[0] + + # Verify feature flags set as expected + assert climate_info.feature_flags == ( + ClimateFeature.SUPPORTS_ACTION + | ClimateFeature.SUPPORTS_CURRENT_HUMIDITY + | ClimateFeature.SUPPORTS_CURRENT_TEMPERATURE + | ClimateFeature.SUPPORTS_TWO_POINT_TARGET_TEMPERATURE + | ClimateFeature.SUPPORTS_TARGET_HUMIDITY + ) + + # Verify modes + assert climate_info.supported_modes == [ + ClimateMode.OFF, + ClimateMode.COOL, + ClimateMode.HEAT, + ], f"Expected modes [OFF, COOL, HEAT], got {climate_info.supported_modes}" + + # Verify visual parameters + assert climate_info.visual_min_temperature == 15.0, ( + f"Expected min_temperature=15.0, got {climate_info.visual_min_temperature}" + ) + assert climate_info.visual_max_temperature == 32.0, ( + f"Expected max_temperature=32.0, got {climate_info.visual_max_temperature}" + ) + assert climate_info.visual_target_temperature_step == 0.1, ( + f"Expected temperature_step=0.1, got {climate_info.visual_target_temperature_step}" + ) + assert climate_info.visual_min_humidity == 20.0, ( + f"Expected min_humidity=20.0, got {climate_info.visual_min_humidity}" + ) + assert climate_info.visual_max_humidity == 70.0, ( + f"Expected max_humidity=70.0, got {climate_info.visual_max_humidity}" + ) + + # Verify fan modes + assert climate_info.supported_fan_modes == [ + ClimateFanMode.ON, + ClimateFanMode.AUTO, + ], f"Expected fan modes [ON, AUTO], got {climate_info.supported_fan_modes}" + # Verify the thermostat has presets assert len(climate_info.supported_presets) > 0, ( "Expected climate to have presets" diff --git a/tests/integration/test_light_calls.py b/tests/integration/test_light_calls.py index 
af90ddbe86..0eaf5af91b 100644 --- a/tests/integration/test_light_calls.py +++ b/tests/integration/test_light_calls.py @@ -8,6 +8,7 @@ import asyncio from typing import Any from aioesphomeapi import LightState +from aioesphomeapi.model import ColorMode import pytest from .types import APIClientConnectedFactory, RunCompiledFunction @@ -35,10 +36,51 @@ async def test_light_calls( # Get the light entities entities = await client.list_entities_services() lights = [e for e in entities[0] if e.object_id.startswith("test_")] - assert len(lights) >= 2 # Should have RGBCW and RGB lights + assert len(lights) >= 3 # Should have RGBCW, RGB, and Binary lights rgbcw_light = next(light for light in lights if "RGBCW" in light.name) rgb_light = next(light for light in lights if "RGB Light" in light.name) + binary_light = next(light for light in lights if "Binary" in light.name) + + # Test color mode encoding: Verify supported_color_modes contains actual ColorMode enum values + # not bit positions. This is critical - the iterator must convert bit positions to actual + # ColorMode enum values for API encoding. + + # RGBCW light (rgbww platform) should support RGB_COLD_WARM_WHITE mode + assert ColorMode.RGB_COLD_WARM_WHITE in rgbcw_light.supported_color_modes, ( + f"RGBCW light missing RGB_COLD_WARM_WHITE mode. Got: {rgbcw_light.supported_color_modes}" + ) + # Verify it's the actual enum value, not bit position + assert ColorMode.RGB_COLD_WARM_WHITE.value in [ + mode.value for mode in rgbcw_light.supported_color_modes + ], ( + f"RGBCW light has wrong color mode values. Expected {ColorMode.RGB_COLD_WARM_WHITE.value} " + f"(RGB_COLD_WARM_WHITE), got: {[mode.value for mode in rgbcw_light.supported_color_modes]}" + ) + + # RGB light should support RGB mode + assert ColorMode.RGB in rgb_light.supported_color_modes, ( + f"RGB light missing RGB color mode. Got: {rgb_light.supported_color_modes}" + ) + # Verify it's the actual enum value, not bit position + assert ColorMode.RGB.value in [ + mode.value for mode in rgb_light.supported_color_modes + ], ( + f"RGB light has wrong color mode values. Expected {ColorMode.RGB.value} (RGB), got: " + f"{[mode.value for mode in rgb_light.supported_color_modes]}" + ) + + # Binary light (on/off only) should support ON_OFF mode + assert ColorMode.ON_OFF in binary_light.supported_color_modes, ( + f"Binary light missing ON_OFF color mode. Got: {binary_light.supported_color_modes}" + ) + # Verify it's the actual enum value, not bit position + assert ColorMode.ON_OFF.value in [ + mode.value for mode in binary_light.supported_color_modes + ], ( + f"Binary light has wrong color mode values. 
Expected {ColorMode.ON_OFF.value} (ON_OFF), got: " + f"{[mode.value for mode in binary_light.supported_color_modes]}" + ) async def wait_for_state_change(key: int, timeout: float = 1.0) -> Any: """Wait for a state change for the given entity key.""" diff --git a/tests/integration/test_noise_encryption_key_protection.py b/tests/integration/test_noise_encryption_key_protection.py index 03c43ca8d3..37d32ce2b4 100644 --- a/tests/integration/test_noise_encryption_key_protection.py +++ b/tests/integration/test_noise_encryption_key_protection.py @@ -49,3 +49,42 @@ async def test_noise_encryption_key_protection( with pytest.raises(InvalidEncryptionKeyAPIError): async with api_client_connected(noise_psk=wrong_key) as client: await client.device_info() + + +@pytest.mark.asyncio +async def test_noise_encryption_key_clear_protection( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test that noise encryption key set in YAML cannot be changed via API.""" + # The key that's set in the YAML fixture + noise_psk = "zX9/JHxMKwpP0jUGsF0iESCm1wRvNgR6NkKVOhn7kSs=" + + # Keep ESPHome process running throughout all tests + async with run_compiled(yaml_config): + # First connection - test key change attempt + async with api_client_connected(noise_psk=noise_psk) as client: + # Verify connection is established + device_info = await client.device_info() + assert device_info is not None + + # Try to set a new encryption key via API + new_key = b"" # Empty key to attempt to clear + + # This should fail since key was set in YAML + success = await client.noise_encryption_set_key(new_key) + assert success is False + + # Reconnect with the original key to verify it still works + async with api_client_connected(noise_psk=noise_psk) as client: + # Verify connection is still successful with original key + device_info = await client.device_info() + assert device_info is not None + assert device_info.name == "noise-key-test" + + # Verify that connecting with a wrong key fails + wrong_key = base64.b64encode(b"y" * 32).decode() # Different key + with pytest.raises(InvalidEncryptionKeyAPIError): + async with api_client_connected(noise_psk=wrong_key) as client: + await client.device_info() diff --git a/tests/integration/test_oversized_payloads.py b/tests/integration/test_oversized_payloads.py index ba18e3d348..8bf890261a 100644 --- a/tests/integration/test_oversized_payloads.py +++ b/tests/integration/test_oversized_payloads.py @@ -281,8 +281,12 @@ async def test_noise_corrupt_encrypted_frame( # Check for signs that the process exited/crashed if "Segmentation fault" in line or "core dumped" in line: process_exited = True - # Check for the expected warning about decryption failure + # Check for the expected log about decryption failure + # This can appear as either a VV-level log from noise or a W-level log from connection if ( + "[VV][api.noise" in line + and "noise_cipherstate_decrypt failed: MAC_FAILURE" in line + ) or ( "[W][api.connection" in line and "Reading failed CIPHERSTATE_DECRYPT_FAILED" in line ): @@ -322,9 +326,9 @@ async def test_noise_corrupt_encrypted_frame( assert not process_exited, ( "ESPHome process should not crash on corrupt encrypted frames" ) - # Verify we saw the expected warning message + # Verify we saw the expected log message about decryption failure assert cipherstate_failed, ( - "Expected to see warning about CIPHERSTATE_DECRYPT_FAILED" + "Expected to see log about noise_cipherstate_decrypt failure or 
CIPHERSTATE_DECRYPT_FAILED" ) # Verify we can still reconnect after handling the corrupt frame diff --git a/tests/integration/test_script_queued.py b/tests/integration/test_script_queued.py index 9f4bce6f31..ce1c25b649 100644 --- a/tests/integration/test_script_queued.py +++ b/tests/integration/test_script_queued.py @@ -31,9 +31,7 @@ async def test_script_queued( # Patterns for Test 1: Queue depth queue_start = re.compile(r"Queue test: START item (\d+)") queue_end = re.compile(r"Queue test: END item (\d+)") - queue_reject = re.compile( - r"Script 'queue_depth_script' maximum number of queued runs exceeded!" - ) + queue_reject = re.compile(r"Script 'queue_depth_script' max instances") # Patterns for Test 2: Ring buffer ring_start = re.compile(r"Ring buffer: START '([A-Z])'") @@ -46,9 +44,7 @@ async def test_script_queued( # Patterns for Test 4: Rejection reject_start = re.compile(r"Rejection test: START (\d+)") reject_end = re.compile(r"Rejection test: END (\d+)") - reject_reject = re.compile( - r"Script 'rejection_script' maximum number of queued runs exceeded!" - ) + reject_reject = re.compile(r"Script 'rejection_script' max instances") # Patterns for Test 5: No params no_params_end = re.compile(r"No params: END") diff --git a/tests/integration/test_sensor_filters_ring_buffer.py b/tests/integration/test_sensor_filters_ring_buffer.py index 8edb1600d9..c8be8edce0 100644 --- a/tests/integration/test_sensor_filters_ring_buffer.py +++ b/tests/integration/test_sensor_filters_ring_buffer.py @@ -7,7 +7,7 @@ import asyncio from aioesphomeapi import EntityState, SensorState import pytest -from .sensor_test_utils import build_key_to_sensor_mapping +from .state_utils import InitialStateHelper, build_key_to_entity_mapping from .types import APIClientConnectedFactory, RunCompiledFunction @@ -36,7 +36,7 @@ async def test_sensor_filters_ring_buffer( if not isinstance(state, SensorState): return - # Skip NaN values (initial states) + # Skip NaN values if state.missing_state: return @@ -66,7 +66,7 @@ async def test_sensor_filters_ring_buffer( entities, services = await client.list_entities_services() # Build key-to-sensor mapping - key_to_sensor = build_key_to_sensor_mapping( + key_to_sensor = build_key_to_entity_mapping( entities, [ "sliding_min", @@ -76,8 +76,17 @@ async def test_sensor_filters_ring_buffer( ], ) - # Subscribe to state changes AFTER building mapping - client.subscribe_states(on_state) + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial states to be sent before pressing button + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial states") # Find the publish button publish_button = next( @@ -112,31 +121,31 @@ async def test_sensor_filters_ring_buffer( # Verify the values at each output position # Position 1: window=[1] - assert abs(sensor_states["sliding_min"][0] - 1.0) < 0.01 - assert abs(sensor_states["sliding_max"][0] - 1.0) < 0.01 - assert abs(sensor_states["sliding_median"][0] - 1.0) < 0.01 - assert abs(sensor_states["sliding_moving_avg"][0] - 1.0) < 0.01 + assert sensor_states["sliding_min"][0] == pytest.approx(1.0) + assert sensor_states["sliding_max"][0] == pytest.approx(1.0) + assert sensor_states["sliding_median"][0] == pytest.approx(1.0) + assert sensor_states["sliding_moving_avg"][0] == pytest.approx(1.0) # 
Position 3: window=[1,2,3] - assert abs(sensor_states["sliding_min"][1] - 1.0) < 0.01 - assert abs(sensor_states["sliding_max"][1] - 3.0) < 0.01 - assert abs(sensor_states["sliding_median"][1] - 2.0) < 0.01 - assert abs(sensor_states["sliding_moving_avg"][1] - 2.0) < 0.01 + assert sensor_states["sliding_min"][1] == pytest.approx(1.0) + assert sensor_states["sliding_max"][1] == pytest.approx(3.0) + assert sensor_states["sliding_median"][1] == pytest.approx(2.0) + assert sensor_states["sliding_moving_avg"][1] == pytest.approx(2.0) # Position 5: window=[1,2,3,4,5] - assert abs(sensor_states["sliding_min"][2] - 1.0) < 0.01 - assert abs(sensor_states["sliding_max"][2] - 5.0) < 0.01 - assert abs(sensor_states["sliding_median"][2] - 3.0) < 0.01 - assert abs(sensor_states["sliding_moving_avg"][2] - 3.0) < 0.01 + assert sensor_states["sliding_min"][2] == pytest.approx(1.0) + assert sensor_states["sliding_max"][2] == pytest.approx(5.0) + assert sensor_states["sliding_median"][2] == pytest.approx(3.0) + assert sensor_states["sliding_moving_avg"][2] == pytest.approx(3.0) # Position 7: window=[3,4,5,6,7] (ring buffer wrapped) - assert abs(sensor_states["sliding_min"][3] - 3.0) < 0.01 - assert abs(sensor_states["sliding_max"][3] - 7.0) < 0.01 - assert abs(sensor_states["sliding_median"][3] - 5.0) < 0.01 - assert abs(sensor_states["sliding_moving_avg"][3] - 5.0) < 0.01 + assert sensor_states["sliding_min"][3] == pytest.approx(3.0) + assert sensor_states["sliding_max"][3] == pytest.approx(7.0) + assert sensor_states["sliding_median"][3] == pytest.approx(5.0) + assert sensor_states["sliding_moving_avg"][3] == pytest.approx(5.0) # Position 9: window=[5,6,7,8,9] (ring buffer wrapped) - assert abs(sensor_states["sliding_min"][4] - 5.0) < 0.01 - assert abs(sensor_states["sliding_max"][4] - 9.0) < 0.01 - assert abs(sensor_states["sliding_median"][4] - 7.0) < 0.01 - assert abs(sensor_states["sliding_moving_avg"][4] - 7.0) < 0.01 + assert sensor_states["sliding_min"][4] == pytest.approx(5.0) + assert sensor_states["sliding_max"][4] == pytest.approx(9.0) + assert sensor_states["sliding_median"][4] == pytest.approx(7.0) + assert sensor_states["sliding_moving_avg"][4] == pytest.approx(7.0) diff --git a/tests/integration/test_sensor_filters_sliding_window.py b/tests/integration/test_sensor_filters_sliding_window.py index 2183946134..b0688a6536 100644 --- a/tests/integration/test_sensor_filters_sliding_window.py +++ b/tests/integration/test_sensor_filters_sliding_window.py @@ -7,7 +7,7 @@ import asyncio from aioesphomeapi import EntityState, SensorState import pytest -from .sensor_test_utils import build_key_to_sensor_mapping +from .state_utils import InitialStateHelper, build_key_to_entity_mapping from .types import APIClientConnectedFactory, RunCompiledFunction @@ -41,7 +41,7 @@ async def test_sensor_filters_sliding_window( if not isinstance(state, SensorState): return - # Skip NaN values (initial states) + # Skip NaN values if state.missing_state: return @@ -57,33 +57,33 @@ async def test_sensor_filters_sliding_window( # Filters send at position 1 and position 6 (send_every=5 means every 5th value after first) if ( sensor_name == "min_sensor" - and abs(state.state - 2.0) < 0.01 + and state.state == pytest.approx(2.0) and not min_received.done() ): min_received.set_result(True) elif ( sensor_name == "max_sensor" - and abs(state.state - 6.0) < 0.01 + and state.state == pytest.approx(6.0) and not max_received.done() ): max_received.set_result(True) elif ( sensor_name == "median_sensor" - and abs(state.state - 4.0) < 
0.01 + and state.state == pytest.approx(4.0) and not median_received.done() ): # Median of [2, 3, 4, 5, 6] = 4 median_received.set_result(True) elif ( sensor_name == "quantile_sensor" - and abs(state.state - 6.0) < 0.01 + and state.state == pytest.approx(6.0) and not quantile_received.done() ): # 90th percentile of [2, 3, 4, 5, 6] = 6 quantile_received.set_result(True) elif ( sensor_name == "moving_avg_sensor" - and abs(state.state - 4.0) < 0.01 + and state.state == pytest.approx(4.0) and not moving_avg_received.done() ): # Average of [2, 3, 4, 5, 6] = 4 @@ -97,7 +97,7 @@ async def test_sensor_filters_sliding_window( entities, services = await client.list_entities_services() # Build key-to-sensor mapping - key_to_sensor = build_key_to_sensor_mapping( + key_to_sensor = build_key_to_entity_mapping( entities, [ "min_sensor", @@ -108,8 +108,17 @@ async def test_sensor_filters_sliding_window( ], ) - # Subscribe to state changes AFTER building mapping - client.subscribe_states(on_state) + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial states to be sent before pressing button + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial states") # Find the publish button publish_button = next( @@ -158,30 +167,30 @@ async def test_sensor_filters_sliding_window( assert len(sensor_states["moving_avg_sensor"]) == 2 # Verify the first output (after 1 value: [1]) - assert abs(sensor_states["min_sensor"][0] - 1.0) < 0.01, ( + assert sensor_states["min_sensor"][0] == pytest.approx(1.0), ( f"First min should be 1.0, got {sensor_states['min_sensor'][0]}" ) - assert abs(sensor_states["max_sensor"][0] - 1.0) < 0.01, ( + assert sensor_states["max_sensor"][0] == pytest.approx(1.0), ( f"First max should be 1.0, got {sensor_states['max_sensor'][0]}" ) - assert abs(sensor_states["median_sensor"][0] - 1.0) < 0.01, ( + assert sensor_states["median_sensor"][0] == pytest.approx(1.0), ( f"First median should be 1.0, got {sensor_states['median_sensor'][0]}" ) - assert abs(sensor_states["moving_avg_sensor"][0] - 1.0) < 0.01, ( + assert sensor_states["moving_avg_sensor"][0] == pytest.approx(1.0), ( f"First moving avg should be 1.0, got {sensor_states['moving_avg_sensor'][0]}" ) # Verify the second output (after 6 values, window has [2, 3, 4, 5, 6]) - assert abs(sensor_states["min_sensor"][1] - 2.0) < 0.01, ( + assert sensor_states["min_sensor"][1] == pytest.approx(2.0), ( f"Second min should be 2.0, got {sensor_states['min_sensor'][1]}" ) - assert abs(sensor_states["max_sensor"][1] - 6.0) < 0.01, ( + assert sensor_states["max_sensor"][1] == pytest.approx(6.0), ( f"Second max should be 6.0, got {sensor_states['max_sensor'][1]}" ) - assert abs(sensor_states["median_sensor"][1] - 4.0) < 0.01, ( + assert sensor_states["median_sensor"][1] == pytest.approx(4.0), ( f"Second median should be 4.0, got {sensor_states['median_sensor'][1]}" ) - assert abs(sensor_states["moving_avg_sensor"][1] - 4.0) < 0.01, ( + assert sensor_states["moving_avg_sensor"][1] == pytest.approx(4.0), ( f"Second moving avg should be 4.0, got {sensor_states['moving_avg_sensor'][1]}" ) @@ -207,11 +216,12 @@ async def test_sensor_filters_nan_handling( if not isinstance(state, SensorState): return - # Skip NaN values (initial states) + # Skip NaN values if state.missing_state: return sensor_name = 
key_to_sensor.get(state.key) + if sensor_name == "min_nan": min_states.append(state.state) elif sensor_name == "max_nan": @@ -234,10 +244,19 @@ async def test_sensor_filters_nan_handling( entities, services = await client.list_entities_services() # Build key-to-sensor mapping - key_to_sensor = build_key_to_sensor_mapping(entities, ["min_nan", "max_nan"]) + key_to_sensor = build_key_to_entity_mapping(entities, ["min_nan", "max_nan"]) - # Subscribe to state changes AFTER building mapping - client.subscribe_states(on_state) + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial states + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial states") # Find the publish button publish_button = next( @@ -271,18 +290,18 @@ async def test_sensor_filters_nan_handling( ) # First output - assert abs(min_states[0] - 10.0) < 0.01, ( + assert min_states[0] == pytest.approx(10.0), ( f"First min should be 10.0, got {min_states[0]}" ) - assert abs(max_states[0] - 10.0) < 0.01, ( + assert max_states[0] == pytest.approx(10.0), ( f"First max should be 10.0, got {max_states[0]}" ) # Second output - verify NaN values were ignored - assert abs(min_states[1] - 5.0) < 0.01, ( + assert min_states[1] == pytest.approx(5.0), ( f"Second min should ignore NaN and return 5.0, got {min_states[1]}" ) - assert abs(max_states[1] - 15.0) < 0.01, ( + assert max_states[1] == pytest.approx(15.0), ( f"Second max should ignore NaN and return 15.0, got {max_states[1]}" ) @@ -305,11 +324,12 @@ async def test_sensor_filters_ring_buffer_wraparound( if not isinstance(state, SensorState): return - # Skip NaN values (initial states) + # Skip NaN values if state.missing_state: return sensor_name = key_to_sensor.get(state.key) + if sensor_name == "wraparound_min": min_states.append(state.state) # With batch_delay: 0ms, we should receive all 3 outputs @@ -324,10 +344,19 @@ async def test_sensor_filters_ring_buffer_wraparound( entities, services = await client.list_entities_services() # Build key-to-sensor mapping - key_to_sensor = build_key_to_sensor_mapping(entities, ["wraparound_min"]) + key_to_sensor = build_key_to_entity_mapping(entities, ["wraparound_min"]) - # Subscribe to state changes AFTER building mapping - client.subscribe_states(on_state) + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial state + try: + await initial_state_helper.wait_for_initial_states() + except TimeoutError: + pytest.fail("Timeout waiting for initial state") # Find the publish button publish_button = next( @@ -355,12 +384,12 @@ async def test_sensor_filters_ring_buffer_wraparound( assert len(min_states) == 3, ( f"Should have 3 states, got {len(min_states)}: {min_states}" ) - assert abs(min_states[0] - 10.0) < 0.01, ( + assert min_states[0] == pytest.approx(10.0), ( f"First min should be 10.0, got {min_states[0]}" ) - assert abs(min_states[1] - 5.0) < 0.01, ( + assert min_states[1] == pytest.approx(5.0), ( f"Second min should be 5.0, got {min_states[1]}" ) - assert abs(min_states[2] - 15.0) < 0.01, ( + assert min_states[2] == pytest.approx(15.0), ( f"Third min should be 15.0, got {min_states[2]}" ) diff 
--git a/tests/integration/test_sensor_filters_value_list.py b/tests/integration/test_sensor_filters_value_list.py new file mode 100644 index 0000000000..87323fc730 --- /dev/null +++ b/tests/integration/test_sensor_filters_value_list.py @@ -0,0 +1,263 @@ +"""Test sensor ValueListFilter functionality (FilterOutValueFilter and ThrottleWithPriorityFilter).""" + +from __future__ import annotations + +import asyncio +import math + +from aioesphomeapi import ButtonInfo, EntityState, SensorState +import pytest + +from .state_utils import InitialStateHelper, build_key_to_entity_mapping +from .types import APIClientConnectedFactory, RunCompiledFunction + + +@pytest.mark.asyncio +async def test_sensor_filters_value_list( + yaml_config: str, + run_compiled: RunCompiledFunction, + api_client_connected: APIClientConnectedFactory, +) -> None: + """Test that ValueListFilter-based filters work correctly.""" + loop = asyncio.get_running_loop() + + # Track state changes for all sensors + sensor_values: dict[str, list[float]] = { + "filter_out_single": [], + "filter_out_multiple": [], + "throttle_priority_single": [], + "throttle_priority_multiple": [], + "filter_out_nan_test": [], + "filter_out_accuracy_2": [], + "throttle_priority_nan": [], + } + + # Futures for each test + filter_out_single_done = loop.create_future() + filter_out_multiple_done = loop.create_future() + throttle_single_done = loop.create_future() + throttle_multiple_done = loop.create_future() + filter_out_nan_done = loop.create_future() + filter_out_accuracy_2_done = loop.create_future() + throttle_nan_done = loop.create_future() + + def on_state(state: EntityState) -> None: + """Track sensor state updates.""" + if not isinstance(state, SensorState) or state.missing_state: + return + + sensor_name = key_to_sensor.get(state.key) + if sensor_name not in sensor_values: + return + + sensor_values[sensor_name].append(state.state) + + # Check completion conditions + if ( + sensor_name == "filter_out_single" + and len(sensor_values[sensor_name]) == 3 + and not filter_out_single_done.done() + ): + filter_out_single_done.set_result(True) + elif ( + sensor_name == "filter_out_multiple" + and len(sensor_values[sensor_name]) == 3 + and not filter_out_multiple_done.done() + ): + filter_out_multiple_done.set_result(True) + elif ( + sensor_name == "throttle_priority_single" + and len(sensor_values[sensor_name]) == 3 + and not throttle_single_done.done() + ): + throttle_single_done.set_result(True) + elif ( + sensor_name == "throttle_priority_multiple" + and len(sensor_values[sensor_name]) == 4 + and not throttle_multiple_done.done() + ): + throttle_multiple_done.set_result(True) + elif ( + sensor_name == "filter_out_nan_test" + and len(sensor_values[sensor_name]) == 3 + and not filter_out_nan_done.done() + ): + filter_out_nan_done.set_result(True) + elif ( + sensor_name == "filter_out_accuracy_2" + and len(sensor_values[sensor_name]) == 2 + and not filter_out_accuracy_2_done.done() + ): + filter_out_accuracy_2_done.set_result(True) + elif ( + sensor_name == "throttle_priority_nan" + and len(sensor_values[sensor_name]) == 3 + and not throttle_nan_done.done() + ): + throttle_nan_done.set_result(True) + + async with ( + run_compiled(yaml_config), + api_client_connected() as client, + ): + # Get entities and build key mapping + entities, _ = await client.list_entities_services() + key_to_sensor = build_key_to_entity_mapping( + entities, + { + "filter_out_single": "Filter Out Single", + "filter_out_multiple": "Filter Out Multiple", + 
"throttle_priority_single": "Throttle Priority Single", + "throttle_priority_multiple": "Throttle Priority Multiple", + "filter_out_nan_test": "Filter Out NaN Test", + "filter_out_accuracy_2": "Filter Out Accuracy 2", + "throttle_priority_nan": "Throttle Priority NaN", + }, + ) + + # Set up initial state helper with all entities + initial_state_helper = InitialStateHelper(entities) + + # Subscribe to state changes with wrapper + client.subscribe_states(initial_state_helper.on_state_wrapper(on_state)) + + # Wait for initial states + await initial_state_helper.wait_for_initial_states() + + # Find all buttons + button_name_map = { + "Test Filter Out Single": "filter_out_single", + "Test Filter Out Multiple": "filter_out_multiple", + "Test Throttle Priority Single": "throttle_priority_single", + "Test Throttle Priority Multiple": "throttle_priority_multiple", + "Test Filter Out NaN": "filter_out_nan", + "Test Filter Out Accuracy 2": "filter_out_accuracy_2", + "Test Throttle Priority NaN": "throttle_priority_nan", + } + buttons = {} + for entity in entities: + if isinstance(entity, ButtonInfo) and entity.name in button_name_map: + buttons[button_name_map[entity.name]] = entity.key + + assert len(buttons) == 7, f"Expected 7 buttons, found {len(buttons)}" + + # Test 1: FilterOutValueFilter - single value + sensor_values["filter_out_single"].clear() + client.button_command(buttons["filter_out_single"]) + try: + await asyncio.wait_for(filter_out_single_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 1 timed out. Values: {sensor_values['filter_out_single']}" + ) + + expected = [1.0, 2.0, 3.0] + assert sensor_values["filter_out_single"] == pytest.approx(expected), ( + f"Test 1 failed: expected {expected}, got {sensor_values['filter_out_single']}" + ) + + # Test 2: FilterOutValueFilter - multiple values + sensor_values["filter_out_multiple"].clear() + filter_out_multiple_done = loop.create_future() + client.button_command(buttons["filter_out_multiple"]) + try: + await asyncio.wait_for(filter_out_multiple_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 2 timed out. Values: {sensor_values['filter_out_multiple']}" + ) + + expected = [1.0, 2.0, 50.0] + assert sensor_values["filter_out_multiple"] == pytest.approx(expected), ( + f"Test 2 failed: expected {expected}, got {sensor_values['filter_out_multiple']}" + ) + + # Test 3: ThrottleWithPriorityFilter - single priority + sensor_values["throttle_priority_single"].clear() + throttle_single_done = loop.create_future() + client.button_command(buttons["throttle_priority_single"]) + try: + await asyncio.wait_for(throttle_single_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 3 timed out. Values: {sensor_values['throttle_priority_single']}" + ) + + expected = [1.0, 42.0, 4.0] + assert sensor_values["throttle_priority_single"] == pytest.approx(expected), ( + f"Test 3 failed: expected {expected}, got {sensor_values['throttle_priority_single']}" + ) + + # Test 4: ThrottleWithPriorityFilter - multiple priorities + sensor_values["throttle_priority_multiple"].clear() + throttle_multiple_done = loop.create_future() + client.button_command(buttons["throttle_priority_multiple"]) + try: + await asyncio.wait_for(throttle_multiple_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 4 timed out. 
Values: {sensor_values['throttle_priority_multiple']}" + ) + + expected = [1.0, 0.0, 42.0, 100.0] + assert sensor_values["throttle_priority_multiple"] == pytest.approx(expected), ( + f"Test 4 failed: expected {expected}, got {sensor_values['throttle_priority_multiple']}" + ) + + # Test 5: FilterOutValueFilter - NaN handling + sensor_values["filter_out_nan_test"].clear() + filter_out_nan_done = loop.create_future() + client.button_command(buttons["filter_out_nan"]) + try: + await asyncio.wait_for(filter_out_nan_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 5 timed out. Values: {sensor_values['filter_out_nan_test']}" + ) + + expected = [1.0, 2.0, 3.0] + assert sensor_values["filter_out_nan_test"] == pytest.approx(expected), ( + f"Test 5 failed: expected {expected}, got {sensor_values['filter_out_nan_test']}" + ) + + # Test 6: FilterOutValueFilter - Accuracy decimals (2) + sensor_values["filter_out_accuracy_2"].clear() + filter_out_accuracy_2_done = loop.create_future() + client.button_command(buttons["filter_out_accuracy_2"]) + try: + await asyncio.wait_for(filter_out_accuracy_2_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 6 timed out. Values: {sensor_values['filter_out_accuracy_2']}" + ) + + expected = [42.01, 42.15] + assert sensor_values["filter_out_accuracy_2"] == pytest.approx(expected), ( + f"Test 6 failed: expected {expected}, got {sensor_values['filter_out_accuracy_2']}" + ) + + # Test 7: ThrottleWithPriorityFilter - NaN priority + sensor_values["throttle_priority_nan"].clear() + throttle_nan_done = loop.create_future() + client.button_command(buttons["throttle_priority_nan"]) + try: + await asyncio.wait_for(throttle_nan_done, timeout=2.0) + except TimeoutError: + pytest.fail( + f"Test 7 timed out. Values: {sensor_values['throttle_priority_nan']}" + ) + + # First value (1.0) + two NaN priority values + # NaN values will be compared using math.isnan + assert len(sensor_values["throttle_priority_nan"]) == 3, ( + f"Test 7 failed: expected 3 values, got {len(sensor_values['throttle_priority_nan'])}" + ) + assert sensor_values["throttle_priority_nan"][0] == pytest.approx(1.0), ( + f"Test 7 failed: first value should be 1.0, got {sensor_values['throttle_priority_nan'][0]}" + ) + assert math.isnan(sensor_values["throttle_priority_nan"][1]), ( + f"Test 7 failed: second value should be NaN, got {sensor_values['throttle_priority_nan'][1]}" + ) + assert math.isnan(sensor_values["throttle_priority_nan"][2]), ( + f"Test 7 failed: third value should be NaN, got {sensor_values['throttle_priority_nan'][2]}" + ) diff --git a/tests/script/test_determine_jobs.py b/tests/script/test_determine_jobs.py index 0559d116be..c9ccf53252 100644 --- a/tests/script/test_determine_jobs.py +++ b/tests/script/test_determine_jobs.py @@ -5,7 +5,6 @@ import importlib.util import json import os from pathlib import Path -import subprocess import sys from unittest.mock import Mock, call, patch @@ -17,6 +16,9 @@ script_dir = os.path.abspath( ) sys.path.insert(0, script_dir) +# Import helpers module for patching +import helpers # noqa: E402 + spec = importlib.util.spec_from_file_location( "determine_jobs", os.path.join(script_dir, "determine-jobs.py") ) @@ -53,35 +55,76 @@ def mock_should_run_python_linters() -> Generator[Mock, None, None]: @pytest.fixture -def mock_subprocess_run() -> Generator[Mock, None, None]: - """Mock subprocess.run for list-components.py calls.""" - with patch.object(determine_jobs.subprocess, "run") as mock: +def mock_determine_cpp_unit_tests() -> 
Generator[Mock, None, None]: + """Mock determine_cpp_unit_tests from helpers.""" + with patch.object(determine_jobs, "determine_cpp_unit_tests") as mock: yield mock +@pytest.fixture +def mock_changed_files() -> Generator[Mock, None, None]: + """Mock changed_files for memory impact detection.""" + with patch.object(determine_jobs, "changed_files") as mock: + # Default to empty list + mock.return_value = [] + yield mock + + +@pytest.fixture(autouse=True) +def clear_clang_tidy_cache() -> None: + """Clear the clang-tidy full scan cache before each test.""" + determine_jobs._is_clang_tidy_full_scan.cache_clear() + + def test_main_all_tests_should_run( mock_should_run_integration_tests: Mock, mock_should_run_clang_tidy: Mock, mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, - mock_subprocess_run: Mock, + mock_changed_files: Mock, + mock_determine_cpp_unit_tests: Mock, capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test when all tests should run.""" + # Ensure we're not in GITHUB_ACTIONS mode for this test + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + mock_should_run_integration_tests.return_value = True mock_should_run_clang_tidy.return_value = True mock_should_run_clang_format.return_value = True mock_should_run_python_linters.return_value = True + mock_determine_cpp_unit_tests.return_value = (False, ["wifi", "api", "sensor"]) - # Mock list-components.py output (now returns JSON with --changed-with-deps) - mock_result = Mock() - mock_result.stdout = json.dumps( - {"directly_changed": ["wifi", "api"], "all_changed": ["wifi", "api", "sensor"]} - ) - mock_subprocess_run.return_value = mock_result + # Mock changed_files to return non-component files (to avoid memory impact) + # Memory impact only runs when component C++ files change + mock_changed_files.return_value = [ + "esphome/config.py", + "esphome/helpers.py", + ] # Run main function with mocked argv - with patch("sys.argv", ["determine-jobs.py"]): + with ( + patch("sys.argv", ["determine-jobs.py"]), + patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False), + patch.object( + determine_jobs, + "get_changed_components", + return_value=["wifi", "api", "sensor"], + ), + patch.object( + determine_jobs, + "filter_component_and_test_files", + side_effect=lambda f: f.startswith("esphome/components/"), + ), + patch.object( + determine_jobs, + "get_components_with_dependencies", + side_effect=lambda files, deps: ( + ["wifi", "api"] if not deps else ["wifi", "api", "sensor"] + ), + ), + ): determine_jobs.main() # Check output @@ -90,6 +133,7 @@ def test_main_all_tests_should_run( assert output["integration_tests"] is True assert output["clang_tidy"] is True + assert output["clang_tidy_mode"] in ["nosplit", "split"] assert output["clang_format"] is True assert output["python_linters"] is True assert output["changed_components"] == ["wifi", "api", "sensor"] @@ -100,6 +144,14 @@ def test_main_all_tests_should_run( assert output["component_test_count"] == len( output["changed_components_with_tests"] ) + # changed_cpp_file_count should be present + assert "changed_cpp_file_count" in output + assert isinstance(output["changed_cpp_file_count"], int) + # memory_impact should be false (no component C++ files changed) + assert "memory_impact" in output + assert output["memory_impact"]["should_run"] == "false" + assert output["cpp_unit_tests_run_all"] is False + assert output["cpp_unit_tests_components"] == ["wifi", "api", "sensor"] def test_main_no_tests_should_run( @@ 
-107,22 +159,35 @@ def test_main_no_tests_should_run( mock_should_run_clang_tidy: Mock, mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, - mock_subprocess_run: Mock, + mock_changed_files: Mock, + mock_determine_cpp_unit_tests: Mock, capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test when no tests should run.""" + # Ensure we're not in GITHUB_ACTIONS mode for this test + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + mock_should_run_integration_tests.return_value = False mock_should_run_clang_tidy.return_value = False mock_should_run_clang_format.return_value = False mock_should_run_python_linters.return_value = False + mock_determine_cpp_unit_tests.return_value = (False, []) - # Mock empty list-components.py output - mock_result = Mock() - mock_result.stdout = json.dumps({"directly_changed": [], "all_changed": []}) - mock_subprocess_run.return_value = mock_result + # Mock changed_files to return no component files + mock_changed_files.return_value = [] # Run main function with mocked argv - with patch("sys.argv", ["determine-jobs.py"]): + with ( + patch("sys.argv", ["determine-jobs.py"]), + patch.object(determine_jobs, "get_changed_components", return_value=[]), + patch.object( + determine_jobs, "filter_component_and_test_files", return_value=False + ), + patch.object( + determine_jobs, "get_components_with_dependencies", return_value=[] + ), + ): determine_jobs.main() # Check output @@ -131,36 +196,19 @@ def test_main_no_tests_should_run( assert output["integration_tests"] is False assert output["clang_tidy"] is False + assert output["clang_tidy_mode"] == "disabled" assert output["clang_format"] is False assert output["python_linters"] is False assert output["changed_components"] == [] assert output["changed_components_with_tests"] == [] assert output["component_test_count"] == 0 - - -def test_main_list_components_fails( - mock_should_run_integration_tests: Mock, - mock_should_run_clang_tidy: Mock, - mock_should_run_clang_format: Mock, - mock_should_run_python_linters: Mock, - mock_subprocess_run: Mock, - capsys: pytest.CaptureFixture[str], -) -> None: - """Test when list-components.py fails.""" - mock_should_run_integration_tests.return_value = True - mock_should_run_clang_tidy.return_value = True - mock_should_run_clang_format.return_value = True - mock_should_run_python_linters.return_value = True - - # Mock list-components.py failure - mock_subprocess_run.side_effect = subprocess.CalledProcessError(1, "cmd") - - # Run main function with mocked argv - should raise - with ( - patch("sys.argv", ["determine-jobs.py"]), - pytest.raises(subprocess.CalledProcessError), - ): - determine_jobs.main() + # changed_cpp_file_count should be 0 + assert output["changed_cpp_file_count"] == 0 + # memory_impact should be present + assert "memory_impact" in output + assert output["memory_impact"]["should_run"] == "false" + assert output["cpp_unit_tests_run_all"] is False + assert output["cpp_unit_tests_components"] == [] def test_main_with_branch_argument( @@ -168,23 +216,38 @@ def test_main_with_branch_argument( mock_should_run_clang_tidy: Mock, mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, - mock_subprocess_run: Mock, + mock_changed_files: Mock, + mock_determine_cpp_unit_tests: Mock, capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test with branch argument.""" + # Ensure we're not in GITHUB_ACTIONS mode for this test + monkeypatch.delenv("GITHUB_ACTIONS", 
raising=False) + mock_should_run_integration_tests.return_value = False mock_should_run_clang_tidy.return_value = True mock_should_run_clang_format.return_value = False mock_should_run_python_linters.return_value = True + mock_determine_cpp_unit_tests.return_value = (False, ["mqtt"]) - # Mock list-components.py output - mock_result = Mock() - mock_result.stdout = json.dumps( - {"directly_changed": ["mqtt"], "all_changed": ["mqtt"]} - ) - mock_subprocess_run.return_value = mock_result + # Mock changed_files to return non-component files (to avoid memory impact) + # Memory impact only runs when component C++ files change + mock_changed_files.return_value = ["esphome/config.py"] - with patch("sys.argv", ["script.py", "-b", "main"]): + with ( + patch("sys.argv", ["script.py", "-b", "main"]), + patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False), + patch.object(determine_jobs, "get_changed_components", return_value=["mqtt"]), + patch.object( + determine_jobs, + "filter_component_and_test_files", + side_effect=lambda f: f.startswith("esphome/components/"), + ), + patch.object( + determine_jobs, "get_components_with_dependencies", return_value=["mqtt"] + ), + ): determine_jobs.main() # Check that functions were called with branch @@ -193,19 +256,13 @@ def test_main_with_branch_argument( mock_should_run_clang_format.assert_called_once_with("main") mock_should_run_python_linters.assert_called_once_with("main") - # Check that list-components.py was called with branch - mock_subprocess_run.assert_called_once() - call_args = mock_subprocess_run.call_args[0][0] - assert "--changed-with-deps" in call_args - assert "-b" in call_args - assert "main" in call_args - # Check output captured = capsys.readouterr() output = json.loads(captured.out) assert output["integration_tests"] is False assert output["clang_tidy"] is True + assert output["clang_tidy_mode"] in ["nosplit", "split"] assert output["clang_format"] is False assert output["python_linters"] is True assert output["changed_components"] == ["mqtt"] @@ -216,6 +273,14 @@ def test_main_with_branch_argument( assert output["component_test_count"] == len( output["changed_components_with_tests"] ) + # changed_cpp_file_count should be present + assert "changed_cpp_file_count" in output + assert isinstance(output["changed_cpp_file_count"], int) + # memory_impact should be false (no component C++ files changed) + assert "memory_impact" in output + assert output["memory_impact"]["should_run"] == "false" + assert output["cpp_unit_tests_run_all"] is False + assert output["cpp_unit_tests_components"] == ["mqtt"] def test_should_run_integration_tests( @@ -316,16 +381,6 @@ def test_should_run_clang_tidy_hash_check_exception() -> None: result = determine_jobs.should_run_clang_tidy() assert result is True # Fail safe - run clang-tidy - # Even with C++ files, exception should trigger clang-tidy - with ( - patch.object( - determine_jobs, "changed_files", return_value=["esphome/core.cpp"] - ), - patch("subprocess.run", side_effect=Exception("Hash check failed")), - ): - result = determine_jobs.should_run_clang_tidy() - assert result is True - def test_should_run_clang_tidy_with_branch() -> None: """Test should_run_clang_tidy with branch argument.""" @@ -397,31 +452,64 @@ def test_should_run_clang_format_with_branch() -> None: mock_changed.assert_called_once_with("release") +@pytest.mark.parametrize( + ("changed_files", "expected_count"), + [ + (["esphome/core.cpp"], 1), + (["esphome/core.h"], 1), + (["test.hpp"], 1), + (["test.cc"], 1), + 
(["test.cxx"], 1), + (["test.c"], 1), + (["test.tcc"], 1), + (["esphome/core.cpp", "esphome/core.h"], 2), + (["esphome/core.cpp", "esphome/core.h", "test.cc"], 3), + (["README.md"], 0), + (["esphome/config.py"], 0), + (["README.md", "esphome/config.py"], 0), + (["esphome/core.cpp", "README.md", "esphome/config.py"], 1), + ([], 0), + ], +) +def test_count_changed_cpp_files(changed_files: list[str], expected_count: int) -> None: + """Test count_changed_cpp_files function.""" + with patch.object(determine_jobs, "changed_files", return_value=changed_files): + result = determine_jobs.count_changed_cpp_files() + assert result == expected_count + + +def test_count_changed_cpp_files_with_branch() -> None: + """Test count_changed_cpp_files with branch argument.""" + with patch.object(determine_jobs, "changed_files") as mock_changed: + mock_changed.return_value = [] + determine_jobs.count_changed_cpp_files("release") + mock_changed.assert_called_once_with("release") + + def test_main_filters_components_without_tests( mock_should_run_integration_tests: Mock, mock_should_run_clang_tidy: Mock, mock_should_run_clang_format: Mock, mock_should_run_python_linters: Mock, - mock_subprocess_run: Mock, + mock_changed_files: Mock, capsys: pytest.CaptureFixture[str], tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test that components without test files are filtered out.""" + # Ensure we're not in GITHUB_ACTIONS mode for this test + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + mock_should_run_integration_tests.return_value = False mock_should_run_clang_tidy.return_value = False mock_should_run_clang_format.return_value = False mock_should_run_python_linters.return_value = False - # Mock list-components.py output with 3 components - # wifi: has tests, sensor: has tests, airthings_ble: no tests - mock_result = Mock() - mock_result.stdout = json.dumps( - { - "directly_changed": ["wifi", "sensor"], - "all_changed": ["wifi", "sensor", "airthings_ble"], - } - ) - mock_subprocess_run.return_value = mock_result + # Mock changed_files to return component files + mock_changed_files.return_value = [ + "esphome/components/wifi/wifi.cpp", + "esphome/components/sensor/sensor.h", + ] # Create test directory structure tests_dir = tmp_path / "tests" / "components" @@ -440,10 +528,29 @@ def test_main_filters_components_without_tests( airthings_dir = tests_dir / "airthings_ble" airthings_dir.mkdir(parents=True) - # Mock root_path to use tmp_path + # Mock root_path to use tmp_path (need to patch both determine_jobs and helpers) with ( patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), patch("sys.argv", ["determine-jobs.py"]), + patch.object( + determine_jobs, + "get_changed_components", + return_value=["wifi", "sensor", "airthings_ble"], + ), + patch.object( + determine_jobs, + "filter_component_and_test_files", + side_effect=lambda f: f.startswith("esphome/components/"), + ), + patch.object( + determine_jobs, + "get_components_with_dependencies", + side_effect=lambda files, deps: ( + ["wifi", "sensor"] if not deps else ["wifi", "sensor", "airthings_ble"] + ), + ), + patch.object(determine_jobs, "changed_files", return_value=[]), ): # Clear the cache since we're mocking root_path determine_jobs._component_has_tests.cache_clear() @@ -459,3 +566,548 @@ def test_main_filters_components_without_tests( assert set(output["changed_components_with_tests"]) == {"wifi", "sensor"} # component_test_count should be based on components with tests assert 
output["component_test_count"] == 2 + # changed_cpp_file_count should be present + assert "changed_cpp_file_count" in output + assert isinstance(output["changed_cpp_file_count"], int) + # memory_impact should be present + assert "memory_impact" in output + assert output["memory_impact"]["should_run"] == "false" + + +def test_main_detects_components_with_variant_tests( + mock_should_run_integration_tests: Mock, + mock_should_run_clang_tidy: Mock, + mock_should_run_clang_format: Mock, + mock_should_run_python_linters: Mock, + mock_changed_files: Mock, + capsys: pytest.CaptureFixture[str], + tmp_path: Path, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test that components with only variant test files (test-*.yaml) are detected. + + This test verifies the fix for components like improv_serial, ethernet, mdns, + improv_base, and safe_mode which only have variant test files (test-*.yaml) + instead of base test files (test.*.yaml). + """ + # Ensure we're not in GITHUB_ACTIONS mode for this test + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + + mock_should_run_integration_tests.return_value = False + mock_should_run_clang_tidy.return_value = False + mock_should_run_clang_format.return_value = False + mock_should_run_python_linters.return_value = False + + # Mock changed_files to return component files + mock_changed_files.return_value = [ + "esphome/components/improv_serial/improv_serial.cpp", + "esphome/components/ethernet/ethernet.cpp", + "esphome/components/no_tests/component.cpp", + ] + + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # improv_serial has only variant tests (like the real component) + improv_serial_dir = tests_dir / "improv_serial" + improv_serial_dir.mkdir(parents=True) + (improv_serial_dir / "test-uart0.esp32-idf.yaml").write_text("test: config") + (improv_serial_dir / "test-uart0.esp8266-ard.yaml").write_text("test: config") + (improv_serial_dir / "test-usb_cdc.esp32-s2-idf.yaml").write_text("test: config") + + # ethernet also has only variant tests + ethernet_dir = tests_dir / "ethernet" + ethernet_dir.mkdir(parents=True) + (ethernet_dir / "test-manual_ip.esp32-idf.yaml").write_text("test: config") + (ethernet_dir / "test-dhcp.esp32-idf.yaml").write_text("test: config") + + # no_tests component has no test files at all + no_tests_dir = tests_dir / "no_tests" + no_tests_dir.mkdir(parents=True) + + # Mock root_path to use tmp_path (need to patch both determine_jobs and helpers) + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch("sys.argv", ["determine-jobs.py"]), + patch.object( + determine_jobs, + "get_changed_components", + return_value=["improv_serial", "ethernet", "no_tests"], + ), + patch.object( + determine_jobs, + "filter_component_and_test_files", + side_effect=lambda f: f.startswith("esphome/components/"), + ), + patch.object( + determine_jobs, + "get_components_with_dependencies", + side_effect=lambda files, deps: ( + ["improv_serial", "ethernet"] + if not deps + else ["improv_serial", "ethernet", "no_tests"] + ), + ), + patch.object(determine_jobs, "changed_files", return_value=[]), + ): + # Clear the cache since we're mocking root_path + determine_jobs._component_has_tests.cache_clear() + determine_jobs.main() + + # Check output + captured = capsys.readouterr() + output = json.loads(captured.out) + + # changed_components should have all components + assert set(output["changed_components"]) == { + "improv_serial", + "ethernet", + 
"no_tests", + } + # changed_components_with_tests should include components with variant tests + assert set(output["changed_components_with_tests"]) == {"improv_serial", "ethernet"} + # component_test_count should be 2 (improv_serial and ethernet) + assert output["component_test_count"] == 2 + # no_tests should be excluded since it has no test files + assert "no_tests" not in output["changed_components_with_tests"] + + +# Tests for detect_memory_impact_config function + + +def test_detect_memory_impact_config_with_common_platform(tmp_path: Path) -> None: + """Test memory impact detection when components share a common platform.""" + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # wifi component with esp32-idf test + wifi_dir = tests_dir / "wifi" + wifi_dir.mkdir(parents=True) + (wifi_dir / "test.esp32-idf.yaml").write_text("test: wifi") + + # api component with esp32-idf test + api_dir = tests_dir / "api" + api_dir.mkdir(parents=True) + (api_dir / "test.esp32-idf.yaml").write_text("test: api") + + # Mock changed_files to return wifi and api component changes + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/wifi/wifi.cpp", + "esphome/components/api/api.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + assert result["should_run"] == "true" + assert set(result["components"]) == {"wifi", "api"} + assert result["platform"] == "esp32-idf" # Common platform + assert result["use_merged_config"] == "true" + + +def test_detect_memory_impact_config_core_only_changes(tmp_path: Path) -> None: + """Test memory impact detection with core C++ changes (no component changes).""" + # Create test directory structure with fallback component + tests_dir = tmp_path / "tests" / "components" + + # api component (fallback component) with esp32-idf test + api_dir = tests_dir / "api" + api_dir.mkdir(parents=True) + (api_dir / "test.esp32-idf.yaml").write_text("test: api") + + # Mock changed_files to return only core C++ files (no component files) + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/core/application.cpp", + "esphome/core/component.h", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + assert result["should_run"] == "true" + assert result["components"] == ["api"] # Fallback component + assert result["platform"] == "esp32-idf" # Fallback platform + assert result["use_merged_config"] == "true" + + +def test_detect_memory_impact_config_core_python_only_changes(tmp_path: Path) -> None: + """Test that Python-only core changes don't trigger memory impact analysis.""" + # Create test directory structure with fallback component + tests_dir = tmp_path / "tests" / "components" + + # api component (fallback component) with esp32-idf test + api_dir = tests_dir / "api" + api_dir.mkdir(parents=True) + (api_dir / "test.esp32-idf.yaml").write_text("test: api") + + # Mock changed_files to return only core Python files (no C++ files) + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", 
str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/__main__.py", + "esphome/config.py", + "esphome/core/config.py", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + # Python-only changes should NOT trigger memory impact analysis + assert result["should_run"] == "false" + + +def test_detect_memory_impact_config_no_common_platform(tmp_path: Path) -> None: + """Test memory impact detection when components have no common platform.""" + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # wifi component only has esp32-idf test + wifi_dir = tests_dir / "wifi" + wifi_dir.mkdir(parents=True) + (wifi_dir / "test.esp32-idf.yaml").write_text("test: wifi") + + # logger component only has esp8266-ard test + logger_dir = tests_dir / "logger" + logger_dir.mkdir(parents=True) + (logger_dir / "test.esp8266-ard.yaml").write_text("test: logger") + + # Mock changed_files to return both components + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/wifi/wifi.cpp", + "esphome/components/logger/logger.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + # Should pick the most frequently supported platform + assert result["should_run"] == "true" + assert set(result["components"]) == {"wifi", "logger"} + # When no common platform, picks most commonly supported + # esp8266-ard is preferred over esp32-idf in the preference list + assert result["platform"] in ["esp32-idf", "esp8266-ard"] + assert result["use_merged_config"] == "true" + + +def test_detect_memory_impact_config_no_changes(tmp_path: Path) -> None: + """Test memory impact detection when no files changed.""" + # Mock changed_files to return empty list + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + assert result["should_run"] == "false" + + +def test_detect_memory_impact_config_no_components_with_tests(tmp_path: Path) -> None: + """Test memory impact detection when changed components have no tests.""" + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # Create component directory but no test files + custom_component_dir = tests_dir / "my_custom_component" + custom_component_dir.mkdir(parents=True) + + # Mock changed_files to return component without tests + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/my_custom_component/component.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + assert result["should_run"] == "false" + + +def test_detect_memory_impact_config_skips_base_bus_components(tmp_path: Path) -> None: + """Test that base bus components (i2c, spi, uart) are skipped.""" + # 
Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # i2c component (should be skipped as it's a base bus component) + i2c_dir = tests_dir / "i2c" + i2c_dir.mkdir(parents=True) + (i2c_dir / "test.esp32-idf.yaml").write_text("test: i2c") + + # wifi component (should not be skipped) + wifi_dir = tests_dir / "wifi" + wifi_dir.mkdir(parents=True) + (wifi_dir / "test.esp32-idf.yaml").write_text("test: wifi") + + # Mock changed_files to return both i2c and wifi + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/i2c/i2c.cpp", + "esphome/components/wifi/wifi.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + # Should only include wifi, not i2c + assert result["should_run"] == "true" + assert result["components"] == ["wifi"] + assert "i2c" not in result["components"] + + +def test_detect_memory_impact_config_with_variant_tests(tmp_path: Path) -> None: + """Test memory impact detection for components with only variant test files. + + This verifies that memory impact analysis works correctly for components like + improv_serial, ethernet, mdns, etc. which only have variant test files + (test-*.yaml) instead of base test files (test.*.yaml). + """ + # Create test directory structure + tests_dir = tmp_path / "tests" / "components" + + # improv_serial with only variant tests + improv_serial_dir = tests_dir / "improv_serial" + improv_serial_dir.mkdir(parents=True) + (improv_serial_dir / "test-uart0.esp32-idf.yaml").write_text("test: improv") + (improv_serial_dir / "test-uart0.esp8266-ard.yaml").write_text("test: improv") + (improv_serial_dir / "test-usb_cdc.esp32-s2-idf.yaml").write_text("test: improv") + + # ethernet with only variant tests + ethernet_dir = tests_dir / "ethernet" + ethernet_dir.mkdir(parents=True) + (ethernet_dir / "test-manual_ip.esp32-idf.yaml").write_text("test: ethernet") + (ethernet_dir / "test-dhcp.esp32-c3-idf.yaml").write_text("test: ethernet") + + # Mock changed_files to return both components + with ( + patch.object(determine_jobs, "root_path", str(tmp_path)), + patch.object(helpers, "root_path", str(tmp_path)), + patch.object(determine_jobs, "changed_files") as mock_changed_files, + ): + mock_changed_files.return_value = [ + "esphome/components/improv_serial/improv_serial.cpp", + "esphome/components/ethernet/ethernet.cpp", + ] + determine_jobs._component_has_tests.cache_clear() + + result = determine_jobs.detect_memory_impact_config() + + # Should detect both components even though they only have variant tests + assert result["should_run"] == "true" + assert set(result["components"]) == {"improv_serial", "ethernet"} + # Both components support esp32-idf + assert result["platform"] == "esp32-idf" + assert result["use_merged_config"] == "true" + + +# Tests for clang-tidy split mode logic + + +def test_clang_tidy_mode_full_scan( + mock_should_run_integration_tests: Mock, + mock_should_run_clang_tidy: Mock, + mock_should_run_clang_format: Mock, + mock_should_run_python_linters: Mock, + mock_changed_files: Mock, + capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test that full scan (hash changed) always uses split mode.""" + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + + mock_should_run_integration_tests.return_value = False + 
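# The repeated cache_clear() calls in these tests (and the autouse clear_clang_tidy_cache
# fixture) exist because the helpers being exercised are memoized with functools.lru_cache,
# so a value computed under one patched root_path would otherwise leak into the next test.
# A self-contained sketch of that failure mode; component_has_tests and _FAKE_FS are
# illustrative names, not the real helpers:
import functools

_FAKE_FS = {"wifi"}  # stand-in for a filesystem lookup under tests/components/

@functools.lru_cache(maxsize=None)
def component_has_tests(component: str) -> bool:
    return component in _FAKE_FS

def test_stale_lru_cache_sketch() -> None:
    assert component_has_tests("sensor") is False
    _FAKE_FS.add("sensor")                          # another test changes the fixture data
    assert component_has_tests("sensor") is False   # stale: the earlier miss was memoized
    component_has_tests.cache_clear()
    assert component_has_tests("sensor") is True    # fresh lookup after clearing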
mock_should_run_clang_tidy.return_value = True + mock_should_run_clang_format.return_value = False + mock_should_run_python_linters.return_value = False + + # Mock changed_files to return no component files + mock_changed_files.return_value = [] + + # Mock full scan (hash changed) + with ( + patch("sys.argv", ["determine-jobs.py"]), + patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=True), + patch.object(determine_jobs, "get_changed_components", return_value=[]), + patch.object( + determine_jobs, "filter_component_and_test_files", return_value=False + ), + patch.object( + determine_jobs, "get_components_with_dependencies", return_value=[] + ), + ): + determine_jobs.main() + + captured = capsys.readouterr() + output = json.loads(captured.out) + + # Full scan should always use split mode + assert output["clang_tidy_mode"] == "split" + + +@pytest.mark.parametrize( + ("component_count", "files_per_component", "expected_mode"), + [ + # Small PR: 5 files in 1 component -> nosplit + (1, 5, "nosplit"), + # Medium PR: 30 files in 2 components -> nosplit + (2, 15, "nosplit"), + # Medium PR: 64 files total -> nosplit (just under threshold) + (2, 32, "nosplit"), + # Large PR: 65 files total -> split (at threshold) + (2, 33, "split"), # 2 * 33 = 66 files + # Large PR: 100 files in 10 components -> split + (10, 10, "split"), + ], + ids=[ + "1_comp_5_files_nosplit", + "2_comp_30_files_nosplit", + "2_comp_64_files_nosplit_under_threshold", + "2_comp_66_files_split_at_threshold", + "10_comp_100_files_split", + ], +) +def test_clang_tidy_mode_targeted_scan( + component_count: int, + files_per_component: int, + expected_mode: str, + mock_should_run_integration_tests: Mock, + mock_should_run_clang_tidy: Mock, + mock_should_run_clang_format: Mock, + mock_should_run_python_linters: Mock, + mock_changed_files: Mock, + capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test clang-tidy mode selection based on files_to_check count.""" + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + + mock_should_run_integration_tests.return_value = False + mock_should_run_clang_tidy.return_value = True + mock_should_run_clang_format.return_value = False + mock_should_run_python_linters.return_value = False + + # Create component names + components = [f"comp{i}" for i in range(component_count)] + + # Mock changed_files to return component files + mock_changed_files.return_value = [ + f"esphome/components/{comp}/file.cpp" for comp in components + ] + + # Mock git_ls_files to return files for each component + cpp_files = { + f"esphome/components/{comp}/file{i}.cpp": 0 + for comp in components + for i in range(files_per_component) + } + + # Create a mock that returns the cpp_files dict for any call + def mock_git_ls_files(patterns=None): + return cpp_files + + with ( + patch("sys.argv", ["determine-jobs.py"]), + patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False), + patch.object(determine_jobs, "git_ls_files", side_effect=mock_git_ls_files), + patch.object(determine_jobs, "get_changed_components", return_value=components), + patch.object( + determine_jobs, + "filter_component_and_test_files", + side_effect=lambda f: f.startswith("esphome/components/"), + ), + patch.object( + determine_jobs, "get_components_with_dependencies", return_value=components + ), + ): + determine_jobs.main() + + captured = capsys.readouterr() + output = json.loads(captured.out) + + assert output["clang_tidy_mode"] == expected_mode + + +def 
test_main_core_files_changed_still_detects_components( + mock_should_run_integration_tests: Mock, + mock_should_run_clang_tidy: Mock, + mock_should_run_clang_format: Mock, + mock_should_run_python_linters: Mock, + mock_changed_files: Mock, + mock_determine_cpp_unit_tests: Mock, + capsys: pytest.CaptureFixture[str], + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test that component changes are detected even when core files change.""" + monkeypatch.delenv("GITHUB_ACTIONS", raising=False) + + mock_should_run_integration_tests.return_value = True + mock_should_run_clang_tidy.return_value = True + mock_should_run_clang_format.return_value = True + mock_should_run_python_linters.return_value = True + mock_determine_cpp_unit_tests.return_value = (True, []) + + mock_changed_files.return_value = [ + "esphome/core/helpers.h", + "esphome/components/select/select_traits.h", + "esphome/components/select/select_traits.cpp", + "esphome/components/api/api.proto", + ] + + with ( + patch("sys.argv", ["determine-jobs.py"]), + patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False), + patch.object(determine_jobs, "get_changed_components", return_value=None), + patch.object( + determine_jobs, + "filter_component_and_test_files", + side_effect=lambda f: f.startswith("esphome/components/"), + ), + patch.object( + determine_jobs, + "get_components_with_dependencies", + side_effect=lambda files, deps: ( + ["select", "api"] + if not deps + else ["select", "api", "bluetooth_proxy", "logger"] + ), + ), + ): + determine_jobs.main() + + captured = capsys.readouterr() + output = json.loads(captured.out) + + assert output["clang_tidy"] is True + assert output["clang_tidy_mode"] == "split" + assert "select" in output["changed_components"] + assert "api" in output["changed_components"] + assert len(output["changed_components"]) > 0 diff --git a/tests/test_build_components/build_components_base.esp8266-ard.yaml b/tests/test_build_components/build_components_base.esp8266-ard.yaml index e4d6607c86..1e2d614392 100644 --- a/tests/test_build_components/build_components_base.esp8266-ard.yaml +++ b/tests/test_build_components/build_components_base.esp8266-ard.yaml @@ -3,7 +3,7 @@ esphome: friendly_name: $component_name esp8266: - board: d1_mini + board: d1_mini_pro logger: level: VERY_VERBOSE diff --git a/tests/unit_tests/core/test_config.py b/tests/unit_tests/core/test_config.py index 4fddfc9678..a1e4627dc9 100644 --- a/tests/unit_tests/core/test_config.py +++ b/tests/unit_tests/core/test_config.py @@ -517,6 +517,35 @@ def test_include_file_cpp(tmp_path: Path, mock_copy_file_if_changed: Mock) -> No mock_cg.add_global.assert_not_called() +def test_include_file_with_c_header( + tmp_path: Path, mock_copy_file_if_changed: Mock +) -> None: + """Test include_file wraps header in extern C block when is_c_header is True.""" + src_file = tmp_path / "c_library.h" + src_file.write_text("// C library header") + + CORE.build_path = tmp_path / "build" + + with patch("esphome.core.config.cg") as mock_cg: + # Mock RawStatement to capture the text + mock_raw_statement = MagicMock() + mock_raw_statement.text = "" + + def raw_statement_side_effect(text): + mock_raw_statement.text = text + return mock_raw_statement + + mock_cg.RawStatement.side_effect = raw_statement_side_effect + + config.include_file(src_file, Path("c_library.h"), is_c_header=True) + + mock_copy_file_if_changed.assert_called_once() + mock_cg.add_global.assert_called_once() + # Check that include statement is wrapped in extern "C" block + assert 'extern 
"C"' in mock_raw_statement.text + assert '#include "c_library.h"' in mock_raw_statement.text + + def test_get_usable_cpu_count() -> None: """Test get_usable_cpu_count returns CPU count.""" count = config.get_usable_cpu_count() diff --git a/tests/unit_tests/fixtures/substitutions/02-expressions.approved.yaml b/tests/unit_tests/fixtures/substitutions/02-expressions.approved.yaml index 443cba144e..1a51fc44cf 100644 --- a/tests/unit_tests/fixtures/substitutions/02-expressions.approved.yaml +++ b/tests/unit_tests/fixtures/substitutions/02-expressions.approved.yaml @@ -8,6 +8,7 @@ substitutions: area: 25 numberOne: 1 var1: 79 + double_width: 14 test_list: - The area is 56 - 56 @@ -25,3 +26,4 @@ test_list: - ord("a") = 97 - chr(97) = a - len([1,2,3]) = 3 + - width = 7, double_width = 14 diff --git a/tests/unit_tests/fixtures/substitutions/02-expressions.input.yaml b/tests/unit_tests/fixtures/substitutions/02-expressions.input.yaml index 07ad992f1f..4612f581b5 100644 --- a/tests/unit_tests/fixtures/substitutions/02-expressions.input.yaml +++ b/tests/unit_tests/fixtures/substitutions/02-expressions.input.yaml @@ -8,6 +8,7 @@ substitutions: area: 25 numberOne: 1 var1: 79 + double_width: ${width * 2} test_list: - "The area is ${width * height}" @@ -23,3 +24,4 @@ test_list: - ord("a") = ${ ord("a") } - chr(97) = ${ chr(97) } - len([1,2,3]) = ${ len([1,2,3]) } + - width = ${width}, double_width = ${double_width} diff --git a/tests/unit_tests/fixtures/substitutions/05-extend-remove.approved.yaml b/tests/unit_tests/fixtures/substitutions/05-extend-remove.approved.yaml new file mode 100644 index 0000000000..a479370f4b --- /dev/null +++ b/tests/unit_tests/fixtures/substitutions/05-extend-remove.approved.yaml @@ -0,0 +1,9 @@ +substitutions: + A: component1 + B: component2 + C: component3 +some_component: + - id: component1 + value: 2 + - id: component2 + value: 5 diff --git a/tests/unit_tests/fixtures/substitutions/05-extend-remove.input.yaml b/tests/unit_tests/fixtures/substitutions/05-extend-remove.input.yaml new file mode 100644 index 0000000000..2e0e60798d --- /dev/null +++ b/tests/unit_tests/fixtures/substitutions/05-extend-remove.input.yaml @@ -0,0 +1,22 @@ +substitutions: + A: component1 + B: component2 + C: component3 + +packages: + - some_component: + - id: component1 + value: 1 + - id: !extend ${B} + value: 4 + - id: !extend ${B} + value: 5 + - id: component3 + value: 6 + +some_component: + - id: !extend ${A} + value: 2 + - id: component2 + value: 3 + - id: !remove ${C} diff --git a/tests/unit_tests/test_core.py b/tests/unit_tests/test_core.py index edf055ca73..378a226dc2 100644 --- a/tests/unit_tests/test_core.py +++ b/tests/unit_tests/test_core.py @@ -571,9 +571,11 @@ class TestEsphomeCore: assert target.address == "4.3.2.1" def test_address__openthread(self, target): - target.name = "test-device" target.config = {} - target.config[const.CONF_OPENTHREAD] = {} + target.config[const.CONF_OPENTHREAD] = { + const.CONF_USE_ADDRESS: "test-device.local" + } + target.name = "test-device" assert target.address == "test-device.local" diff --git a/tests/unit_tests/test_main.py b/tests/unit_tests/test_main.py index 73dfe359f0..9119c88502 100644 --- a/tests/unit_tests/test_main.py +++ b/tests/unit_tests/test_main.py @@ -17,10 +17,12 @@ from esphome import platformio_api from esphome.__main__ import ( Purpose, choose_upload_log_host, + command_analyze_memory, command_clean_all, command_rename, command_update_all, command_wizard, + detect_external_components, get_port_type, has_ip_address, has_mqtt, @@ -226,13 
+228,47 @@ def mock_run_external_process() -> Generator[Mock]: @pytest.fixture -def mock_run_external_command() -> Generator[Mock]: - """Mock run_external_command for testing.""" +def mock_run_external_command_main() -> Generator[Mock]: + """Mock run_external_command in __main__ module (different from platformio_api).""" with patch("esphome.__main__.run_external_command") as mock: mock.return_value = 0 # Default to success yield mock +@pytest.fixture +def mock_write_cpp() -> Generator[Mock]: + """Mock write_cpp for testing.""" + with patch("esphome.__main__.write_cpp") as mock: + mock.return_value = 0 # Default to success + yield mock + + +@pytest.fixture +def mock_compile_program() -> Generator[Mock]: + """Mock compile_program for testing.""" + with patch("esphome.__main__.compile_program") as mock: + mock.return_value = 0 # Default to success + yield mock + + +@pytest.fixture +def mock_get_esphome_components() -> Generator[Mock]: + """Mock get_esphome_components for testing.""" + with patch("esphome.analyze_memory.helpers.get_esphome_components") as mock: + mock.return_value = {"logger", "api", "ota"} + yield mock + + +@pytest.fixture +def mock_memory_analyzer_cli() -> Generator[Mock]: + """Mock MemoryAnalyzerCLI for testing.""" + with patch("esphome.analyze_memory.cli.MemoryAnalyzerCLI") as mock_class: + mock_analyzer = MagicMock() + mock_analyzer.generate_report.return_value = "Mock Memory Report" + mock_class.return_value = mock_analyzer + yield mock_class + + def test_choose_upload_log_host_with_string_default() -> None: """Test with a single string default device.""" setup_core() @@ -839,7 +875,7 @@ def test_upload_program_serial_esp8266_with_file( def test_upload_using_esptool_path_conversion( tmp_path: Path, - mock_run_external_command: Mock, + mock_run_external_command_main: Mock, mock_get_idedata: Mock, ) -> None: """Test upload_using_esptool properly converts Path objects to strings for esptool. 
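# The rename to mock_run_external_command_main reflects the usual unittest.mock rule:
# patch the name where it is looked up, not where it is defined, because a
# "from module import func" creates an independent binding in the importing module.
# A self-contained sketch with two throwaway modules; util_mod and main_mod are
# illustrative names, not real ESPHome modules:
import sys
import types
from unittest.mock import patch

util_mod = types.ModuleType("util_mod")
util_mod.run_cmd = lambda: "real"
main_mod = types.ModuleType("main_mod")
main_mod.run_cmd = util_mod.run_cmd              # mirrors "from util_mod import run_cmd"
main_mod.do_upload = lambda: main_mod.run_cmd()
sys.modules["util_mod"] = util_mod
sys.modules["main_mod"] = main_mod

def test_patch_where_used_sketch() -> None:
    # Patching the defining module leaves main_mod's own reference untouched
    with patch("util_mod.run_cmd", return_value="mocked"):
        assert main_mod.do_upload() == "real"
    # Patching the consuming module is what the renamed fixture does for esphome.__main__
    with patch("main_mod.run_cmd", return_value="mocked"):
        assert main_mod.do_upload() == "mocked"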
@@ -875,10 +911,10 @@ def test_upload_using_esptool_path_conversion(
     assert result == 0
 
     # Verify that run_external_command was called
-    assert mock_run_external_command.call_count == 1
+    assert mock_run_external_command_main.call_count == 1
 
     # Get the actual call arguments
-    call_args = mock_run_external_command.call_args[0]
+    call_args = mock_run_external_command_main.call_args[0]
 
     # The first argument should be esptool.main function,
     # followed by the command arguments
@@ -917,7 +953,7 @@ def test_upload_using_esptool_path_conversion(
 
 def test_upload_using_esptool_with_file_path(
     tmp_path: Path,
-    mock_run_external_command: Mock,
+    mock_run_external_command_main: Mock,
 ) -> None:
     """Test upload_using_esptool with a custom file that's a Path object."""
     setup_core(platform=PLATFORM_ESP8266, tmp_path=tmp_path, name="test")
@@ -934,10 +970,10 @@ def test_upload_using_esptool_with_file_path(
     assert result == 0
 
     # Verify that run_external_command was called
-    mock_run_external_command.assert_called_once()
+    mock_run_external_command_main.assert_called_once()
 
     # Get the actual call arguments
-    call_args = mock_run_external_command.call_args[0]
+    call_args = mock_run_external_command_main.call_args[0]
     cmd_list = list(call_args[1:])  # Skip the esptool.main function
 
     # Find the firmware path in the command
@@ -2273,3 +2309,226 @@ def test_show_logs_api_mqtt_timeout_fallback(
 
     # Verify run_logs was called with only the static IP (MQTT failed)
     mock_run_logs.assert_called_once_with(CORE.config, ["192.168.1.100"])
+
+
+def test_detect_external_components_no_external(
+    mock_get_esphome_components: Mock,
+) -> None:
+    """Test detect_external_components with no external components."""
+    config = {
+        CONF_ESPHOME: {CONF_NAME: "test_device"},
+        "logger": {},
+        "api": {},
+    }
+
+    result = detect_external_components(config)
+
+    assert result == set()
+    mock_get_esphome_components.assert_called_once()
+
+
+def test_detect_external_components_with_external(
+    mock_get_esphome_components: Mock,
+) -> None:
+    """Test detect_external_components detects external components."""
+    config = {
+        CONF_ESPHOME: {CONF_NAME: "test_device"},
+        "logger": {},  # Built-in
+        "api": {},  # Built-in
+        "my_custom_sensor": {},  # External
+        "another_custom": {},  # External
+        "external_components": [],  # Special key, not a component
+        "substitutions": {},  # Special key, not a component
+    }
+
+    result = detect_external_components(config)
+
+    assert result == {"my_custom_sensor", "another_custom"}
+    mock_get_esphome_components.assert_called_once()
+
+
+def test_detect_external_components_filters_special_keys(
+    mock_get_esphome_components: Mock,
+) -> None:
+    """Test detect_external_components filters out special config keys."""
+    config = {
+        CONF_ESPHOME: {CONF_NAME: "test_device"},
+        "substitutions": {"key": "value"},
+        "packages": {},
+        "globals": [],
+        "external_components": [],
+        "<<": {},  # YAML merge key
+    }
+
+    result = detect_external_components(config)
+
+    assert result == set()
+    mock_get_esphome_components.assert_called_once()
+
+
+def test_command_analyze_memory_success(
+    tmp_path: Path,
+    capfd: CaptureFixture[str],
+    mock_write_cpp: Mock,
+    mock_compile_program: Mock,
+    mock_get_idedata: Mock,
+    mock_get_esphome_components: Mock,
+    mock_memory_analyzer_cli: Mock,
+) -> None:
+    """Test command_analyze_memory with successful compilation and analysis."""
+    setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device")
+
+    # Create firmware.elf file
+    firmware_path = (
+        tmp_path / ".esphome" / "build" / "test_device" / ".pioenvs" / "test_device"
+    )
+    firmware_path.mkdir(parents=True, exist_ok=True)
+    firmware_elf = firmware_path / "firmware.elf"
+    firmware_elf.write_text("mock elf file")
+
+    # Mock idedata
+    mock_idedata_obj = MagicMock(spec=platformio_api.IDEData)
+    mock_idedata_obj.firmware_elf_path = str(firmware_elf)
+    mock_idedata_obj.objdump_path = "/path/to/objdump"
+    mock_idedata_obj.readelf_path = "/path/to/readelf"
+    mock_get_idedata.return_value = mock_idedata_obj
+
+    config = {
+        CONF_ESPHOME: {CONF_NAME: "test_device"},
+        "logger": {},
+    }
+
+    args = MockArgs()
+
+    result = command_analyze_memory(args, config)
+
+    assert result == 0
+
+    # Verify compilation was done
+    mock_write_cpp.assert_called_once_with(config)
+    mock_compile_program.assert_called_once_with(args, config)
+
+    # Verify analyzer was created with correct parameters
+    mock_memory_analyzer_cli.assert_called_once_with(
+        str(firmware_elf),
+        "/path/to/objdump",
+        "/path/to/readelf",
+        set(),  # No external components
+    )
+
+    # Verify analysis was run
+    mock_analyzer = mock_memory_analyzer_cli.return_value
+    mock_analyzer.analyze.assert_called_once()
+    mock_analyzer.generate_report.assert_called_once()
+
+    # Verify report was printed
+    captured = capfd.readouterr()
+    assert "Mock Memory Report" in captured.out
+
+
+def test_command_analyze_memory_with_external_components(
+    tmp_path: Path,
+    mock_write_cpp: Mock,
+    mock_compile_program: Mock,
+    mock_get_idedata: Mock,
+    mock_get_esphome_components: Mock,
+    mock_memory_analyzer_cli: Mock,
+) -> None:
+    """Test command_analyze_memory detects external components."""
+    setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device")
+
+    # Create firmware.elf file
+    firmware_path = (
+        tmp_path / ".esphome" / "build" / "test_device" / ".pioenvs" / "test_device"
+    )
+    firmware_path.mkdir(parents=True, exist_ok=True)
+    firmware_elf = firmware_path / "firmware.elf"
+    firmware_elf.write_text("mock elf file")
+
+    # Mock idedata
+    mock_idedata_obj = MagicMock(spec=platformio_api.IDEData)
+    mock_idedata_obj.firmware_elf_path = str(firmware_elf)
+    mock_idedata_obj.objdump_path = "/path/to/objdump"
+    mock_idedata_obj.readelf_path = "/path/to/readelf"
+    mock_get_idedata.return_value = mock_idedata_obj
+
+    config = {
+        CONF_ESPHOME: {CONF_NAME: "test_device"},
+        "logger": {},
+        "my_custom_component": {"param": "value"},  # External component
+        "external_components": [{"source": "github://user/repo"}],  # Not a component
+    }
+
+    args = MockArgs()
+
+    result = command_analyze_memory(args, config)
+
+    assert result == 0
+
+    # Verify analyzer was created with external components detected
+    mock_memory_analyzer_cli.assert_called_once_with(
+        str(firmware_elf),
+        "/path/to/objdump",
+        "/path/to/readelf",
+        {"my_custom_component"},  # External component detected
+    )
+
+
+def test_command_analyze_memory_write_cpp_fails(
+    tmp_path: Path,
+    mock_write_cpp: Mock,
+) -> None:
+    """Test command_analyze_memory when write_cpp fails."""
+    setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device")
+
+    config = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
+    args = MockArgs()
+
+    mock_write_cpp.return_value = 1  # Failure
+
+    result = command_analyze_memory(args, config)
+
+    assert result == 1
+    mock_write_cpp.assert_called_once_with(config)
+
+
+def test_command_analyze_memory_compile_fails(
+    tmp_path: Path,
+    mock_write_cpp: Mock,
+    mock_compile_program: Mock,
+) -> None:
+    """Test command_analyze_memory when compilation fails."""
+    setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device")
+
+    config = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
+    args = MockArgs()
+
+    mock_compile_program.return_value = 1  # Compilation failed
+
+    result = command_analyze_memory(args, config)
+
+    assert result == 1
+    mock_write_cpp.assert_called_once_with(config)
+    mock_compile_program.assert_called_once_with(args, config)
+
+
+def test_command_analyze_memory_no_idedata(
+    tmp_path: Path,
+    caplog: pytest.LogCaptureFixture,
+    mock_write_cpp: Mock,
+    mock_compile_program: Mock,
+    mock_get_idedata: Mock,
+) -> None:
+    """Test command_analyze_memory when idedata cannot be retrieved."""
+    setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device")
+
+    config = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
+    args = MockArgs()
+
+    mock_get_idedata.return_value = None  # Failed to get idedata
+
+    with caplog.at_level(logging.ERROR):
+        result = command_analyze_memory(args, config)
+
+    assert result == 1
+    assert "Failed to get IDE data for memory analysis" in caplog.text
diff --git a/tests/unit_tests/test_platformio_api.py b/tests/unit_tests/test_platformio_api.py
index 07948cc6ad..13ef3516e4 100644
--- a/tests/unit_tests/test_platformio_api.py
+++ b/tests/unit_tests/test_platformio_api.py
@@ -387,6 +387,42 @@ def test_idedata_addr2line_path_unix(setup_core: Path) -> None:
     assert result == "/usr/bin/addr2line"
 
 
+def test_idedata_objdump_path_windows(setup_core: Path) -> None:
+    """Test IDEData.objdump_path on Windows."""
+    raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "C:\\tools\\gcc.exe"}
+    idedata = platformio_api.IDEData(raw_data)
+
+    result = idedata.objdump_path
+    assert result == "C:\\tools\\objdump.exe"
+
+
+def test_idedata_objdump_path_unix(setup_core: Path) -> None:
+    """Test IDEData.objdump_path on Unix."""
+    raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "/usr/bin/gcc"}
+    idedata = platformio_api.IDEData(raw_data)
+
+    result = idedata.objdump_path
+    assert result == "/usr/bin/objdump"
+
+
+def test_idedata_readelf_path_windows(setup_core: Path) -> None:
+    """Test IDEData.readelf_path on Windows."""
+    raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "C:\\tools\\gcc.exe"}
+    idedata = platformio_api.IDEData(raw_data)
+
+    result = idedata.readelf_path
+    assert result == "C:\\tools\\readelf.exe"
+
+
+def test_idedata_readelf_path_unix(setup_core: Path) -> None:
+    """Test IDEData.readelf_path on Unix."""
+    raw_data = {"prog_path": "/path/to/firmware.elf", "cc_path": "/usr/bin/gcc"}
+    idedata = platformio_api.IDEData(raw_data)
+
+    result = idedata.readelf_path
+    assert result == "/usr/bin/readelf"
+
+
 def test_patch_structhash(setup_core: Path) -> None:
     """Test patch_structhash monkey patches platformio functions."""
     # Create simple namespace objects to act as modules
diff --git a/tests/unit_tests/test_substitutions.py b/tests/unit_tests/test_substitutions.py
index 59396a4a83..beb1ebc73e 100644
--- a/tests/unit_tests/test_substitutions.py
+++ b/tests/unit_tests/test_substitutions.py
@@ -4,6 +4,7 @@ from pathlib import Path
 
 from esphome import config as config_module, yaml_util
 from esphome.components import substitutions
+from esphome.config import resolve_extend_remove
 from esphome.config_helpers import merge_config
 from esphome.const import CONF_PACKAGES, CONF_SUBSTITUTIONS
 from esphome.core import CORE
@@ -81,6 +82,8 @@ def test_substitutions_fixtures(fixture_path):
 
     substitutions.do_substitution_pass(config, None)
 
+    resolve_extend_remove(config)
+
     # Also load expected using ESPHome's loader, or use {} if missing and DEV_MODE
     if expected_path.is_file():
         expected = yaml_util.load_yaml(expected_path)