Mirror of https://github.com/esphome/esphome.git, synced 2025-11-01 23:51:47 +00:00

Compare commits: dependabot ... 20251001-t (136 commits)
Commit SHA1s:

820a897c1b 2523d83255 84b69a5766 acef2085d9 aba0ad1210 865663ce5f ae010fd6f1 91a10d0e36
d5c36eaf2a e70cb098ae 7f2d8a2c11 4f4da1de22 f9807db08a 541fb8b27c 85e0a4fbf9 7e54803ede
a078486a87 ba18bb6a4f 07ad32968e 0b077bdfc6 1f00617738 9cf1fd24fd bbd636a8cc 322dc530a9
0b09e50685 a96cc5e6f2 9a4288d81a b95999aca7 c70937ed01 3151606d50 5080698c3a 931e3f80f0
85f1019d90 cd93f7f55a d98b00f56d 8fd43f1d96 0475ec5533 6fe5a0c736 1ec9383abe 558d4eb9dd
c6ecfd0c55 3b8b2c0754 f5d69a2539 29b9073d62 a45e94cd06 71f2fb8353 0fcae15c25 a1d6bac21a
db69ce24ae 293400ee14 57bf3f968f 922c2bcd5a 5e9b972831 3bc0041b94 daa03e5b3c 62ce39e430
a9e5e4d6d2 bfeade1e2b 95a0c9594f 8762d7cf0e 84316d62f9 e1e047c53f b0ada914bc e2101f5a20
b134d42e3b f87c969b43 f011c44130 843f590db4 2c86ebaf7f 25fe4a1476 86c12079b4 79aafe2cd5
a5d6e39b2f a78a7dfa4e 7879df4dd1 43c62297e8 5049c7227d 256d3b119b 6d2c700c43 9d081795e8
59848a2c8a c7c408e667 acfa325f23 cb97271704 b3b65316f0 b61cec8e77 24243fb22c ba6c8c87c2
f5774cc138 6d09e68b2e fe9db75c27 2b832e9ee8 661e9f9991 39e23c323d bdfbac0301 9646653e57
c6c202e4f7 62f73c768e cd1215347e b8353b3117 5d3574c81f 364e5ffd79 c38c2a1daf 070b0882b8
7e2ccb7bc3 68d57b6bc0 7f1173fcba a75ccf841c 56eb605ec9 2c4818de00 2b94de8732 f71aed3a5c
353e097085 14d76e9e4e f2e0a412db 6943b1d985 18062d154f 2b0b82b2fb 3e1c8f37c5 236ca12d3e
42f1b61e31 708f8a95e5 10ca86ae8d 22056e0809 fe4857fabb 3054c2bc29 b190f37ae7 8e6ee2bed1
354f46f7c0 7b6acd3c00 11f5f7683c 5da589abd0 daa39a489d 3bb95a190d 25a6202bb9 c4eeed7f7e
```diff
@@ -1,4 +1,5 @@
 [run]
 omit =
     esphome/components/*
+    esphome/analyze_memory/*
     tests/integration/*
```
.github/workflows/ci.yml (vendored) · 353 changed lines
```diff
@@ -175,6 +175,7 @@ jobs:
       changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }}
       directly-changed-components-with-tests: ${{ steps.determine.outputs.directly-changed-components-with-tests }}
       component-test-count: ${{ steps.determine.outputs.component-test-count }}
+      memory_impact: ${{ steps.determine.outputs.memory-impact }}
     steps:
       - name: Check out code from GitHub
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
```
```diff
@@ -204,6 +205,7 @@
           echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT
           echo "directly-changed-components-with-tests=$(echo "$output" | jq -c '.directly_changed_components_with_tests')" >> $GITHUB_OUTPUT
           echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT
+          echo "memory-impact=$(echo "$output" | jq -c '.memory_impact')" >> $GITHUB_OUTPUT

   integration-tests:
     name: Run integration tests
```
```diff
@@ -379,7 +381,16 @@

           # Use intelligent splitter that groups components with same bus configs
           components='${{ needs.determine-jobs.outputs.changed-components-with-tests }}'
-          directly_changed='${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}'

+          # Only isolate directly changed components when targeting dev branch
+          # For beta/release branches, group everything for faster CI
+          if [[ "${{ github.base_ref }}" == beta* ]] || [[ "${{ github.base_ref }}" == release* ]]; then
+            directly_changed='[]'
+            echo "Target branch: ${{ github.base_ref }} - grouping all components"
+          else
+            directly_changed='${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}'
+            echo "Target branch: ${{ github.base_ref }} - isolating directly changed components"
+          fi
+
           echo "Splitting components intelligently..."
           output=$(python3 script/split_components_for_ci.py --components "$components" --directly-changed "$directly_changed" --batch-size 40 --output github)
```
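The splitter call above hands the full changed-component list plus the directly-changed subset to script/split_components_for_ci.py, which emits a GitHub matrix. A rough sketch of the batching-with-isolation idea, with hypothetical helper names and without the bus-config grouping the real script also performs:

```python
# Hypothetical sketch only; the real script/split_components_for_ci.py
# additionally groups components that share the same bus configuration.
import json


def split_for_ci(
    components: list[str], directly_changed: list[str], batch_size: int = 40
) -> list[list[str]]:
    changed = set(directly_changed)
    # Directly changed components are isolated into single-component batches
    # so strict validation can run on them without config merging.
    batches = [[c] for c in components if c in changed]
    # Everything else (unmodified dependencies) is packed into fixed-size batches.
    groupable = [c for c in components if c not in changed]
    batches += [groupable[i : i + batch_size] for i in range(0, len(groupable), batch_size)]
    return batches


print(json.dumps(split_for_ci(["uart", "i2c", "gpio"], ["uart"], batch_size=2)))
# -> [["uart"], ["i2c", "gpio"]]
```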
```diff
@@ -396,7 +407,7 @@
     if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0
     strategy:
       fail-fast: false
-      max-parallel: ${{ (github.base_ref == 'beta' || github.base_ref == 'release') && 8 || 4 }}
+      max-parallel: ${{ (startsWith(github.base_ref, 'beta') || startsWith(github.base_ref, 'release')) && 8 || 4 }}
       matrix:
         components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
     steps:
```
```diff
@@ -424,18 +435,31 @@
       - name: Validate and compile components with intelligent grouping
         run: |
           . venv/bin/activate
-          # Use /mnt for build files (70GB available vs ~29GB on /)
-          # Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there)
-          sudo mkdir -p /mnt/platformio
-          sudo chown $USER:$USER /mnt/platformio
-          mkdir -p ~/.platformio
-          sudo mount --bind /mnt/platformio ~/.platformio
-
-          # Bind mount test build directory to /mnt
-          sudo mkdir -p /mnt/test_build_components_build
-          sudo chown $USER:$USER /mnt/test_build_components_build
-          mkdir -p tests/test_build_components/build
-          sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build
+          # Check if /mnt has more free space than / before bind mounting
+          # Extract available space in KB for comparison
+          root_avail=$(df -k / | awk 'NR==2 {print $4}')
+          mnt_avail=$(df -k /mnt 2>/dev/null | awk 'NR==2 {print $4}')
+
+          echo "Available space: / has ${root_avail}KB, /mnt has ${mnt_avail}KB"
+
+          # Only use /mnt if it has more space than /
+          if [ -n "$mnt_avail" ] && [ "$mnt_avail" -gt "$root_avail" ]; then
+            echo "Using /mnt for build files (more space available)"
+            # Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there)
+            sudo mkdir -p /mnt/platformio
+            sudo chown $USER:$USER /mnt/platformio
+            mkdir -p ~/.platformio
+            sudo mount --bind /mnt/platformio ~/.platformio
+
+            # Bind mount test build directory to /mnt
+            sudo mkdir -p /mnt/test_build_components_build
+            sudo chown $USER:$USER /mnt/test_build_components_build
+            mkdir -p tests/test_build_components/build
+            sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build
+          else
+            echo "Using / for build files (more space available than /mnt or /mnt unavailable)"
+          fi

           # Convert space-separated components to comma-separated for Python script
           components_csv=$(echo "${{ matrix.components }}" | tr ' ' ',')
```
```diff
@@ -448,7 +472,7 @@
           # - This catches pin conflicts and other issues in directly changed code
           # - Grouped tests use --testing-mode to allow config merging (disables some checks)
           # - Dependencies are safe to group since they weren't modified in this PR
-          if [ "${{ github.base_ref }}" = "beta" ] || [ "${{ github.base_ref }}" = "release" ]; then
+          if [[ "${{ github.base_ref }}" == beta* ]] || [[ "${{ github.base_ref }}" == release* ]]; then
            directly_changed_csv=""
            echo "Testing components: $components_csv"
            echo "Target branch: ${{ github.base_ref }} - grouping all components"
```
```diff
@@ -459,6 +483,11 @@
           fi
           echo ""

+          # Show disk space before validation (after bind mounts setup)
+          echo "Disk space before config validation:"
+          df -h
+          echo ""
+
           # Run config validation with grouping and isolation
           python3 script/test_build_components.py -e config -c "$components_csv" -f --isolate "$directly_changed_csv"
```
```diff
@@ -466,6 +495,11 @@
           echo "Config validation passed! Starting compilation..."
           echo ""

+          # Show disk space before compilation
+          echo "Disk space before compilation:"
+          df -h
+          echo ""
+
           # Run compilation with grouping and isolation
           python3 script/test_build_components.py -e compile -c "$components_csv" -f --isolate "$directly_changed_csv"
```
```diff
@@ -474,7 +508,7 @@
     runs-on: ubuntu-latest
     needs:
       - common
-    if: github.event_name == 'pull_request' && github.base_ref != 'beta' && github.base_ref != 'release'
+    if: github.event_name == 'pull_request' && !startsWith(github.base_ref, 'beta') && !startsWith(github.base_ref, 'release')
     steps:
       - name: Check out code from GitHub
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
```
```diff
@@ -489,6 +523,292 @@
       - uses: pre-commit-ci/lite-action@5d6cc0eb514c891a40562a58a8e71576c5c7fb43 # v1.1.0
         if: always()

+  memory-impact-target-branch:
+    name: Build target branch for memory impact
+    runs-on: ubuntu-24.04
+    needs:
+      - common
+      - determine-jobs
+    if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true'
+    outputs:
+      ram_usage: ${{ steps.extract.outputs.ram_usage }}
+      flash_usage: ${{ steps.extract.outputs.flash_usage }}
+      cache_hit: ${{ steps.cache-memory-analysis.outputs.cache-hit }}
+      skip: ${{ steps.check-script.outputs.skip }}
+    steps:
+      - name: Check out target branch
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        with:
+          ref: ${{ github.base_ref }}
+
+      # Check if memory impact extraction script exists on target branch
+      # If not, skip the analysis (this handles older branches that don't have the feature)
+      - name: Check for memory impact script
+        id: check-script
+        run: |
+          if [ -f "script/ci_memory_impact_extract.py" ]; then
+            echo "skip=false" >> $GITHUB_OUTPUT
+          else
+            echo "skip=true" >> $GITHUB_OUTPUT
+            echo "::warning::ci_memory_impact_extract.py not found on target branch, skipping memory impact analysis"
+          fi
+
+      # All remaining steps only run if script exists
+      - name: Generate cache key
+        id: cache-key
+        if: steps.check-script.outputs.skip != 'true'
+        run: |
+          # Get the commit SHA of the target branch
+          target_sha=$(git rev-parse HEAD)
+
+          # Hash the build infrastructure files (all files that affect build/analysis)
+          infra_hash=$(cat \
+            script/test_build_components.py \
+            script/ci_memory_impact_extract.py \
+            script/analyze_component_buses.py \
+            script/merge_component_configs.py \
+            script/ci_helpers.py \
+            .github/workflows/ci.yml \
+            | sha256sum | cut -d' ' -f1)
+
+          # Get platform and components from job inputs
+          platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}"
+          components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}'
+          components_hash=$(echo "$components" | sha256sum | cut -d' ' -f1)
+
+          # Combine into cache key
+          cache_key="memory-analysis-target-${target_sha}-${infra_hash}-${platform}-${components_hash}"
+          echo "cache-key=${cache_key}" >> $GITHUB_OUTPUT
+          echo "Cache key: ${cache_key}"
+
+      - name: Restore cached memory analysis
+        id: cache-memory-analysis
+        if: steps.check-script.outputs.skip != 'true'
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        with:
+          path: memory-analysis-target.json
+          key: ${{ steps.cache-key.outputs.cache-key }}
+
+      - name: Cache status
+        if: steps.check-script.outputs.skip != 'true'
+        run: |
+          if [ "${{ steps.cache-memory-analysis.outputs.cache-hit }}" == "true" ]; then
+            echo "✓ Cache hit! Using cached memory analysis results."
+            echo " Skipping build step to save time."
+          else
+            echo "✗ Cache miss. Will build and analyze memory usage."
+          fi
+
+      - name: Restore Python
+        if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
+        uses: ./.github/actions/restore-python
+        with:
+          python-version: ${{ env.DEFAULT_PYTHON }}
+          cache-key: ${{ needs.common.outputs.cache-key }}
+
+      - name: Cache platformio
+        if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        with:
+          path: ~/.platformio
+          key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
+
+      - name: Build, compile, and analyze memory
+        if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
+        id: build
+        run: |
+          . venv/bin/activate
+          components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}'
+          platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}"
+
+          echo "Building with test_build_components.py for $platform with components:"
+          echo "$components" | jq -r '.[]' | sed 's/^/ - /'
+
+          # Use test_build_components.py which handles grouping automatically
+          # Pass components as comma-separated list
+          component_list=$(echo "$components" | jq -r 'join(",")')
+
+          echo "Compiling with test_build_components.py..."
+
+          # Run build and extract memory with auto-detection of build directory for detailed analysis
+          # Use tee to show output in CI while also piping to extraction script
+          python script/test_build_components.py \
+            -e compile \
+            -c "$component_list" \
+            -t "$platform" 2>&1 | \
+            tee /dev/stderr | \
+            python script/ci_memory_impact_extract.py \
+              --output-env \
+              --output-json memory-analysis-target.json
+
+      - name: Save memory analysis to cache
+        if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success'
+        uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        with:
+          path: memory-analysis-target.json
+          key: ${{ steps.cache-key.outputs.cache-key }}
+
+      - name: Extract memory usage for outputs
+        id: extract
+        if: steps.check-script.outputs.skip != 'true'
+        run: |
+          if [ -f memory-analysis-target.json ]; then
+            ram=$(jq -r '.ram_bytes' memory-analysis-target.json)
+            flash=$(jq -r '.flash_bytes' memory-analysis-target.json)
+            echo "ram_usage=${ram}" >> $GITHUB_OUTPUT
+            echo "flash_usage=${flash}" >> $GITHUB_OUTPUT
+            echo "RAM: ${ram} bytes, Flash: ${flash} bytes"
+          else
+            echo "Error: memory-analysis-target.json not found"
+            exit 1
+          fi
+
+      - name: Upload memory analysis JSON
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: memory-analysis-target
+          path: memory-analysis-target.json
+          if-no-files-found: warn
+          retention-days: 1
+
+  memory-impact-pr-branch:
+    name: Build PR branch for memory impact
+    runs-on: ubuntu-24.04
+    needs:
+      - common
+      - determine-jobs
+    if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true'
+    outputs:
+      ram_usage: ${{ steps.extract.outputs.ram_usage }}
+      flash_usage: ${{ steps.extract.outputs.flash_usage }}
+    steps:
+      - name: Check out PR branch
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - name: Restore Python
+        uses: ./.github/actions/restore-python
+        with:
+          python-version: ${{ env.DEFAULT_PYTHON }}
+          cache-key: ${{ needs.common.outputs.cache-key }}
+      - name: Cache platformio
+        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        with:
+          path: ~/.platformio
+          key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
+      - name: Build, compile, and analyze memory
+        id: extract
+        run: |
+          . venv/bin/activate
+          components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}'
+          platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}"
+
+          echo "Building with test_build_components.py for $platform with components:"
+          echo "$components" | jq -r '.[]' | sed 's/^/ - /'
+
+          # Use test_build_components.py which handles grouping automatically
+          # Pass components as comma-separated list
+          component_list=$(echo "$components" | jq -r 'join(",")')
+
+          echo "Compiling with test_build_components.py..."
+
+          # Run build and extract memory with auto-detection of build directory for detailed analysis
+          # Use tee to show output in CI while also piping to extraction script
+          python script/test_build_components.py \
+            -e compile \
+            -c "$component_list" \
+            -t "$platform" 2>&1 | \
+            tee /dev/stderr | \
+            python script/ci_memory_impact_extract.py \
+              --output-env \
+              --output-json memory-analysis-pr.json
+      - name: Upload memory analysis JSON
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: memory-analysis-pr
+          path: memory-analysis-pr.json
+          if-no-files-found: warn
+          retention-days: 1
+
+  memory-impact-comment:
+    name: Comment memory impact
+    runs-on: ubuntu-24.04
+    needs:
+      - common
+      - determine-jobs
+      - memory-impact-target-branch
+      - memory-impact-pr-branch
+    if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true'
+    permissions:
+      contents: read
+      pull-requests: write
+    steps:
+      - name: Check out code
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - name: Restore Python
+        uses: ./.github/actions/restore-python
+        with:
+          python-version: ${{ env.DEFAULT_PYTHON }}
+          cache-key: ${{ needs.common.outputs.cache-key }}
+      - name: Download target analysis JSON
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        with:
+          name: memory-analysis-target
+          path: ./memory-analysis
+        continue-on-error: true
+      - name: Download PR analysis JSON
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        with:
+          name: memory-analysis-pr
+          path: ./memory-analysis
+        continue-on-error: true
+      - name: Post or update PR comment
+        env:
+          GH_TOKEN: ${{ github.token }}
+          COMPONENTS: ${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}
+          PLATFORM: ${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}
+          TARGET_RAM: ${{ needs.memory-impact-target-branch.outputs.ram_usage }}
+          TARGET_FLASH: ${{ needs.memory-impact-target-branch.outputs.flash_usage }}
+          PR_RAM: ${{ needs.memory-impact-pr-branch.outputs.ram_usage }}
+          PR_FLASH: ${{ needs.memory-impact-pr-branch.outputs.flash_usage }}
+          TARGET_CACHE_HIT: ${{ needs.memory-impact-target-branch.outputs.cache_hit }}
+        run: |
+          . venv/bin/activate
+
+          # Check if analysis JSON files exist
+          target_json_arg=""
+          pr_json_arg=""
+
+          if [ -f ./memory-analysis/memory-analysis-target.json ]; then
+            echo "Found target analysis JSON"
+            target_json_arg="--target-json ./memory-analysis/memory-analysis-target.json"
+          else
+            echo "No target analysis JSON found"
+          fi
+
+          if [ -f ./memory-analysis/memory-analysis-pr.json ]; then
+            echo "Found PR analysis JSON"
+            pr_json_arg="--pr-json ./memory-analysis/memory-analysis-pr.json"
+          else
+            echo "No PR analysis JSON found"
+          fi
+
+          # Add cache flag if target was cached
+          cache_flag=""
+          if [ "$TARGET_CACHE_HIT" == "true" ]; then
+            cache_flag="--target-cache-hit"
+          fi
+
+          python script/ci_memory_impact_comment.py \
+            --pr-number "${{ github.event.pull_request.number }}" \
+            --components "$COMPONENTS" \
+            --platform "$PLATFORM" \
+            --target-ram "$TARGET_RAM" \
+            --target-flash "$TARGET_FLASH" \
+            --pr-ram "$PR_RAM" \
+            --pr-flash "$PR_FLASH" \
+            $target_json_arg \
+            $pr_json_arg \
+            $cache_flag
+
   ci-status:
     name: CI Status
     runs-on: ubuntu-24.04
```
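These jobs feed two byte counts per branch (plus optional JSON detail files) into script/ci_memory_impact_comment.py. That script is not part of this diff; a minimal sketch of the delta arithmetic such a comment needs, with assumed output formatting:

```python
# Sketch only: the report format of script/ci_memory_impact_comment.py is assumed.
def format_delta(target_bytes: int, pr_bytes: int) -> str:
    delta = pr_bytes - target_bytes
    pct = (delta / target_bytes * 100) if target_bytes else 0.0
    sign = "+" if delta >= 0 else ""
    return f"{target_bytes:,} B -> {pr_bytes:,} B ({sign}{delta:,} B, {sign}{pct:.2f}%)"


print("RAM:  ", format_delta(28_860, 29_012))    # RAM:   28,860 B -> 29,012 B (+152 B, +0.53%)
print("Flash:", format_delta(456_789, 455_901))  # Flash: 456,789 B -> 455,901 B (-888 B, -0.19%)
```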
```diff
@@ -503,6 +823,9 @@
       - test-build-components-splitter
       - test-build-components-split
       - pre-commit-ci-lite
+      - memory-impact-target-branch
+      - memory-impact-pr-branch
+      - memory-impact-comment
     if: always()
     steps:
       - name: Success
```
.github/workflows/codeql.yml (vendored) · 4 changed lines

```diff
@@ -58,7 +58,7 @@ jobs:

     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
+      uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
      with:
        languages: ${{ matrix.language }}
        build-mode: ${{ matrix.build-mode }}
@@ -86,6 +86,6 @@
          exit 1

     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
+      uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
      with:
        category: "/language:${{matrix.language}}"
```
```diff
@@ -11,7 +11,7 @@ ci:
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: v0.14.0
+    rev: v0.14.1
     hooks:
       # Run the linter.
       - id: ruff
```
```diff
@@ -62,6 +62,7 @@ esphome/components/bedjet/fan/* @jhansche
 esphome/components/bedjet/sensor/* @javawizard @jhansche
 esphome/components/beken_spi_led_strip/* @Mat931
 esphome/components/bh1750/* @OttoWinter
+esphome/components/bh1900nux/* @B48D81EFCC
 esphome/components/binary_sensor/* @esphome/core
 esphome/components/bk72xx/* @kuba2k2
 esphome/components/bl0906/* @athom-tech @jesserockz @tarontop
```
```diff
@@ -117,6 +117,17 @@ class Purpose(StrEnum):
     LOGGING = "logging"


+class PortType(StrEnum):
+    SERIAL = "SERIAL"
+    NETWORK = "NETWORK"
+    MQTT = "MQTT"
+    MQTTIP = "MQTTIP"
+
+
+# Magic MQTT port types that require special handling
+_MQTT_PORT_TYPES = frozenset({PortType.MQTT, PortType.MQTTIP})
+
+
 def _resolve_with_cache(address: str, purpose: Purpose) -> list[str]:
     """Resolve an address using cache if available, otherwise return the address itself."""
     if CORE.address_cache and (cached := CORE.address_cache.get_addresses(address)):
```
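Since PortType subclasses StrEnum, each member is itself a str equal to its value, so existing call sites that compare against bare strings keep working while the migration proceeds. A quick illustration (enum.StrEnum is Python 3.11+; Purpose above is declared with the same base name):

```python
from enum import StrEnum


class PortType(StrEnum):
    SERIAL = "SERIAL"
    NETWORK = "NETWORK"


# StrEnum members are real strings, so old-style comparisons still pass
assert PortType.SERIAL == "SERIAL"
assert f"port type: {PortType.NETWORK}" == "port type: NETWORK"
```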
@@ -280,16 +291,67 @@ def mqtt_get_ip(config: ConfigType, username: str, password: str, client_id: str
|
||||
return mqtt.get_esphome_device_ip(config, username, password, client_id)
|
||||
|
||||
|
||||
_PORT_TO_PORT_TYPE = {
|
||||
"MQTT": "MQTT",
|
||||
"MQTTIP": "MQTTIP",
|
||||
}
|
||||
def _resolve_network_devices(
|
||||
devices: list[str], config: ConfigType, args: ArgsProtocol
|
||||
) -> list[str]:
|
||||
"""Resolve device list, converting MQTT magic strings to actual IP addresses.
|
||||
|
||||
This function filters the devices list to:
|
||||
- Replace MQTT/MQTTIP magic strings with actual IP addresses via MQTT lookup
|
||||
- Deduplicate addresses while preserving order
|
||||
- Only resolve MQTT once even if multiple MQTT strings are present
|
||||
- If MQTT resolution fails, log a warning and continue with other devices
|
||||
|
||||
Args:
|
||||
devices: List of device identifiers (IPs, hostnames, or magic strings)
|
||||
config: ESPHome configuration
|
||||
args: Command-line arguments containing MQTT credentials
|
||||
|
||||
Returns:
|
||||
List of network addresses suitable for connection attempts
|
||||
"""
|
||||
network_devices: list[str] = []
|
||||
mqtt_resolved: bool = False
|
||||
|
||||
for device in devices:
|
||||
port_type = get_port_type(device)
|
||||
if port_type in _MQTT_PORT_TYPES:
|
||||
# Only resolve MQTT once, even if multiple MQTT entries
|
||||
if not mqtt_resolved:
|
||||
try:
|
||||
mqtt_ips = mqtt_get_ip(
|
||||
config, args.username, args.password, args.client_id
|
||||
)
|
||||
network_devices.extend(mqtt_ips)
|
||||
except EsphomeError as err:
|
||||
_LOGGER.warning(
|
||||
"MQTT IP discovery failed (%s), will try other devices if available",
|
||||
err,
|
||||
)
|
||||
mqtt_resolved = True
|
||||
elif device not in network_devices:
|
||||
# Regular network address or IP - add if not already present
|
||||
network_devices.append(device)
|
||||
|
||||
return network_devices
|
||||
|
||||
|
||||
def get_port_type(port: str) -> str:
|
||||
def get_port_type(port: str) -> PortType:
|
||||
"""Determine the type of port/device identifier.
|
||||
|
||||
Returns:
|
||||
PortType.SERIAL for serial ports (/dev/ttyUSB0, COM1, etc.)
|
||||
PortType.MQTT for MQTT logging
|
||||
PortType.MQTTIP for MQTT IP lookup
|
||||
PortType.NETWORK for IP addresses, hostnames, or mDNS names
|
||||
"""
|
||||
if port.startswith("/") or port.startswith("COM"):
|
||||
return "SERIAL"
|
||||
return _PORT_TO_PORT_TYPE.get(port, "NETWORK")
|
||||
return PortType.SERIAL
|
||||
if port == "MQTT":
|
||||
return PortType.MQTT
|
||||
if port == "MQTTIP":
|
||||
return PortType.MQTTIP
|
||||
return PortType.NETWORK
|
||||
|
||||
|
||||
def run_miniterm(config: ConfigType, port: str, args) -> int:
|
||||
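To make the new resolution concrete, here is the behavior the docstring describes, under the assumption that the MQTT lookup would return 192.168.1.50: the two magic strings trigger a single lookup, and the duplicate literal address is dropped while order is preserved.

```python
# Illustration only; assumes mqtt_get_ip(...) would return ["192.168.1.50"].
devices = ["MQTT", "192.168.1.7", "MQTTIP", "192.168.1.7", "kitchen.local"]
# _resolve_network_devices(devices, config, args) would then produce:
expected = ["192.168.1.50", "192.168.1.7", "kitchen.local"]
```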
```diff
@@ -404,7 +466,9 @@ def write_cpp_file() -> int:
 def compile_program(args: ArgsProtocol, config: ConfigType) -> int:
     from esphome import platformio_api

-    _LOGGER.info("Compiling app...")
+    # NOTE: "Build path:" format is parsed by script/ci_memory_impact_extract.py
+    # If you change this format, update the regex in that script as well
+    _LOGGER.info("Compiling app... Build path: %s", CORE.build_path)
     rc = platformio_api.run_compile(config, CORE.verbose)
     if rc != 0:
         return rc
```
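The NOTE added above ties this log line to script/ci_memory_impact_extract.py, which is not included in this diff. A plausible sketch of that coupling, with an assumed regex:

```python
# Assumed regex; the authoritative one lives in script/ci_memory_impact_extract.py.
import re

_BUILD_PATH_RE = re.compile(r"Compiling app\.\.\. Build path: (?P<path>\S+)")

line = "INFO Compiling app... Build path: /config/.esphome/build/demo"
if match := _BUILD_PATH_RE.search(line):
    print(match.group("path"))  # /config/.esphome/build/demo
```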
```diff
@@ -489,7 +553,7 @@ def upload_using_platformio(config: ConfigType, port: str):


 def check_permissions(port: str):
-    if os.name == "posix" and get_port_type(port) == "SERIAL":
+    if os.name == "posix" and get_port_type(port) == PortType.SERIAL:
         # Check if we can open selected serial port
         if not os.access(port, os.F_OK):
             raise EsphomeError(
```
```diff
@@ -517,7 +581,7 @@ def upload_program(
     except AttributeError:
         pass

-    if get_port_type(host) == "SERIAL":
+    if get_port_type(host) == PortType.SERIAL:
         check_permissions(host)

     exit_code = 1
```
```diff
@@ -544,17 +608,16 @@
         from esphome import espota2

         remote_port = int(ota_conf[CONF_PORT])
-        password = ota_conf.get(CONF_PASSWORD, "")
+        password = ota_conf.get(CONF_PASSWORD)
         if getattr(args, "file", None) is not None:
             binary = Path(args.file)
         else:
             binary = CORE.firmware_bin

-        # MQTT address resolution
-        if get_port_type(host) in ("MQTT", "MQTTIP"):
-            devices = mqtt_get_ip(config, args.username, args.password, args.client_id)
+        # Resolve MQTT magic strings to actual IP addresses
+        network_devices = _resolve_network_devices(devices, config, args)

-        return espota2.run_ota(devices, remote_port, password, binary)
+        return espota2.run_ota(network_devices, remote_port, password, binary)


 def show_logs(config: ConfigType, args: ArgsProtocol, devices: list[str]) -> int | None:
```
```diff
@@ -569,33 +632,22 @@ def show_logs(config: ConfigType, args: ArgsProtocol, devices: list[str]) -> int
         raise EsphomeError("Logger is not configured!")

     port = devices[0]
+    port_type = get_port_type(port)

-    if get_port_type(port) == "SERIAL":
+    if port_type == PortType.SERIAL:
         check_permissions(port)
         return run_miniterm(config, port, args)

-    port_type = get_port_type(port)
-
-    # Check if we should use API for logging
-    if has_api():
-        addresses_to_use: list[str] | None = None
+    # Resolve MQTT magic strings to actual IP addresses
+    if has_api() and (
+        network_devices := _resolve_network_devices(devices, config, args)
+    ):
+        from esphome.components.api.client import run_logs

-        if port_type == "NETWORK":
-            # Network addresses (IPs, mDNS names, or regular DNS hostnames) can be used
-            # The resolve_ip_address() function in helpers.py handles all types
-            addresses_to_use = devices
-        elif port_type in ("MQTT", "MQTTIP") and has_mqtt_ip_lookup():
-            # Use MQTT IP lookup for MQTT/MQTTIP types
-            addresses_to_use = mqtt_get_ip(
-                config, args.username, args.password, args.client_id
-            )
+        return run_logs(config, network_devices)

-    if addresses_to_use is not None:
-        from esphome.components.api.client import run_logs
-
-        return run_logs(config, addresses_to_use)
-
-    if port_type in ("NETWORK", "MQTT") and has_mqtt_logging():
+    if port_type in (PortType.NETWORK, PortType.MQTT) and has_mqtt_logging():
         from esphome import mqtt

         return mqtt.show_logs(
```
esphome/analyze_memory/__init__.py · new file, 502 lines

```python
"""Memory usage analyzer for ESPHome compiled binaries."""

from collections import defaultdict
from dataclasses import dataclass, field
import logging
from pathlib import Path
import re
import subprocess
from typing import TYPE_CHECKING

from .const import (
    CORE_SUBCATEGORY_PATTERNS,
    DEMANGLED_PATTERNS,
    ESPHOME_COMPONENT_PATTERN,
    SECTION_TO_ATTR,
    SYMBOL_PATTERNS,
)
from .helpers import (
    get_component_class_patterns,
    get_esphome_components,
    map_section_name,
    parse_symbol_line,
)

if TYPE_CHECKING:
    from esphome.platformio_api import IDEData

_LOGGER = logging.getLogger(__name__)

# GCC global constructor/destructor prefix annotations
_GCC_PREFIX_ANNOTATIONS = {
    "_GLOBAL__sub_I_": "global constructor for",
    "_GLOBAL__sub_D_": "global destructor for",
}

# GCC optimization suffix pattern (e.g., $isra$0, $part$1, $constprop$2)
_GCC_OPTIMIZATION_SUFFIX_PATTERN = re.compile(r"(\$(?:isra|part|constprop)\$\d+)")

# C++ runtime patterns for categorization
_CPP_RUNTIME_PATTERNS = frozenset(["vtable", "typeinfo", "thunk"])

# libc printf/scanf family base names (used to detect variants like _printf_r, vfprintf, etc.)
_LIBC_PRINTF_SCANF_FAMILY = frozenset(["printf", "fprintf", "sprintf", "scanf"])

# Regex pattern for parsing readelf section headers
# Format: [ #] name type addr off size
_READELF_SECTION_PATTERN = re.compile(
    r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)"
)

# Component category prefixes
_COMPONENT_PREFIX_ESPHOME = "[esphome]"
_COMPONENT_PREFIX_EXTERNAL = "[external]"
_COMPONENT_CORE = f"{_COMPONENT_PREFIX_ESPHOME}core"
_COMPONENT_API = f"{_COMPONENT_PREFIX_ESPHOME}api"

# C++ namespace prefixes
_NAMESPACE_ESPHOME = "esphome::"
_NAMESPACE_STD = "std::"

# Type alias for symbol information: (symbol_name, size, component)
SymbolInfoType = tuple[str, int, str]


@dataclass
class MemorySection:
    """Represents a memory section with its symbols."""

    name: str
    symbols: list[SymbolInfoType] = field(default_factory=list)
    total_size: int = 0


@dataclass
class ComponentMemory:
    """Tracks memory usage for a component."""

    name: str
    text_size: int = 0  # Code in flash
    rodata_size: int = 0  # Read-only data in flash
    data_size: int = 0  # Initialized data (flash + ram)
    bss_size: int = 0  # Uninitialized data (ram only)
    symbol_count: int = 0

    @property
    def flash_total(self) -> int:
        """Total flash usage (text + rodata + data)."""
        return self.text_size + self.rodata_size + self.data_size

    @property
    def ram_total(self) -> int:
        """Total RAM usage (data + bss)."""
        return self.data_size + self.bss_size


class MemoryAnalyzer:
    """Analyzes memory usage from ELF files."""

    def __init__(
        self,
        elf_path: str,
        objdump_path: str | None = None,
        readelf_path: str | None = None,
        external_components: set[str] | None = None,
        idedata: "IDEData | None" = None,
    ) -> None:
        """Initialize memory analyzer.

        Args:
            elf_path: Path to ELF file to analyze
            objdump_path: Path to objdump binary (auto-detected from idedata if not provided)
            readelf_path: Path to readelf binary (auto-detected from idedata if not provided)
            external_components: Set of external component names
            idedata: Optional PlatformIO IDEData object to auto-detect toolchain paths
        """
        self.elf_path = Path(elf_path)
        if not self.elf_path.exists():
            raise FileNotFoundError(f"ELF file not found: {elf_path}")

        # Auto-detect toolchain paths from idedata if not provided
        if idedata is not None and (objdump_path is None or readelf_path is None):
            objdump_path = objdump_path or idedata.objdump_path
            readelf_path = readelf_path or idedata.readelf_path
            _LOGGER.debug("Using toolchain paths from PlatformIO idedata")

        self.objdump_path = objdump_path or "objdump"
        self.readelf_path = readelf_path or "readelf"
        self.external_components = external_components or set()

        self.sections: dict[str, MemorySection] = {}
        self.components: dict[str, ComponentMemory] = defaultdict(
            lambda: ComponentMemory("")
        )
        self._demangle_cache: dict[str, str] = {}
        self._uncategorized_symbols: list[tuple[str, str, int]] = []
        self._esphome_core_symbols: list[
            tuple[str, str, int]
        ] = []  # Track core symbols
        self._component_symbols: dict[str, list[tuple[str, str, int]]] = defaultdict(
            list
        )  # Track symbols for all components

    def analyze(self) -> dict[str, ComponentMemory]:
        """Analyze the ELF file and return component memory usage."""
        self._parse_sections()
        self._parse_symbols()
        self._categorize_symbols()
        return dict(self.components)

    def _parse_sections(self) -> None:
        """Parse section headers from ELF file."""
        result = subprocess.run(
            [self.readelf_path, "-S", str(self.elf_path)],
            capture_output=True,
            text=True,
            check=True,
        )

        # Parse section headers
        for line in result.stdout.splitlines():
            # Look for section entries
            if not (match := _READELF_SECTION_PATTERN.match(line)):
                continue

            section_name = match.group(1)
            size_hex = match.group(2)
            size = int(size_hex, 16)

            # Map to standard section name
            mapped_section = map_section_name(section_name)
            if not mapped_section:
                continue

            if mapped_section not in self.sections:
                self.sections[mapped_section] = MemorySection(mapped_section)
            self.sections[mapped_section].total_size += size

    def _parse_symbols(self) -> None:
        """Parse symbols from ELF file."""
        result = subprocess.run(
            [self.objdump_path, "-t", str(self.elf_path)],
            capture_output=True,
            text=True,
            check=True,
        )

        # Track seen addresses to avoid duplicates
        seen_addresses: set[str] = set()

        for line in result.stdout.splitlines():
            if not (symbol_info := parse_symbol_line(line)):
                continue

            section, name, size, address = symbol_info

            # Skip duplicate symbols at the same address (e.g., C1/C2 constructors)
            if address in seen_addresses or section not in self.sections:
                continue

            self.sections[section].symbols.append((name, size, ""))
            seen_addresses.add(address)

    def _categorize_symbols(self) -> None:
        """Categorize symbols by component."""
        # First, collect all unique symbol names for batch demangling
        all_symbols = {
            symbol_name
            for section in self.sections.values()
            for symbol_name, _, _ in section.symbols
        }

        # Batch demangle all symbols at once
        self._batch_demangle_symbols(list(all_symbols))

        # Now categorize with cached demangled names
        for section_name, section in self.sections.items():
            for symbol_name, size, _ in section.symbols:
                component = self._identify_component(symbol_name)

                if component not in self.components:
                    self.components[component] = ComponentMemory(component)

                comp_mem = self.components[component]
                comp_mem.symbol_count += 1

                # Update the appropriate size attribute based on section
                if attr_name := SECTION_TO_ATTR.get(section_name):
                    setattr(comp_mem, attr_name, getattr(comp_mem, attr_name) + size)

                # Track uncategorized symbols
                if component == "other" and size > 0:
                    demangled = self._demangle_symbol(symbol_name)
                    self._uncategorized_symbols.append((symbol_name, demangled, size))

                # Track ESPHome core symbols for detailed analysis
                if component == _COMPONENT_CORE and size > 0:
                    demangled = self._demangle_symbol(symbol_name)
                    self._esphome_core_symbols.append((symbol_name, demangled, size))

                # Track all component symbols for detailed analysis
                if size > 0:
                    demangled = self._demangle_symbol(symbol_name)
                    self._component_symbols[component].append(
                        (symbol_name, demangled, size)
                    )

    def _identify_component(self, symbol_name: str) -> str:
        """Identify which component a symbol belongs to."""
        # Demangle C++ names if needed
        demangled = self._demangle_symbol(symbol_name)

        # Check for special component classes first (before namespace pattern)
        # This handles cases like esphome::ESPHomeOTAComponent which should map to ota
        if _NAMESPACE_ESPHOME in demangled:
            # Check for special component classes that include component name in the class
            # For example: esphome::ESPHomeOTAComponent -> ota component
            for component_name in get_esphome_components():
                patterns = get_component_class_patterns(component_name)
                if any(pattern in demangled for pattern in patterns):
                    return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"

        # Check for ESPHome component namespaces
        match = ESPHOME_COMPONENT_PATTERN.search(demangled)
        if match:
            component_name = match.group(1)
            # Strip trailing underscore if present (e.g., switch_ -> switch)
            component_name = component_name.rstrip("_")

            # Check if this is an actual component in the components directory
            if component_name in get_esphome_components():
                return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"
            # Check if this is a known external component from the config
            if component_name in self.external_components:
                return f"{_COMPONENT_PREFIX_EXTERNAL}{component_name}"
            # Everything else in esphome:: namespace is core
            return _COMPONENT_CORE

        # Check for esphome core namespace (no component namespace)
        if _NAMESPACE_ESPHOME in demangled:
            # If no component match found, it's core
            return _COMPONENT_CORE

        # Check against symbol patterns
        for component, patterns in SYMBOL_PATTERNS.items():
            if any(pattern in symbol_name for pattern in patterns):
                return component

        # Check against demangled patterns
        for component, patterns in DEMANGLED_PATTERNS.items():
            if any(pattern in demangled for pattern in patterns):
                return component

        # Special cases that need more complex logic

        # Check if spi_flash vs spi_driver
        if "spi_" in symbol_name or "SPI" in symbol_name:
            return "spi_flash" if "spi_flash" in symbol_name else "spi_driver"

        # libc special printf variants
        if (
            symbol_name.startswith("_")
            and symbol_name[1:].replace("_r", "").replace("v", "").replace("s", "")
            in _LIBC_PRINTF_SCANF_FAMILY
        ):
            return "libc"

        # Track uncategorized symbols for analysis
        return "other"

    def _batch_demangle_symbols(self, symbols: list[str]) -> None:
        """Batch demangle C++ symbol names for efficiency."""
        if not symbols:
            return

        # Try to find the appropriate c++filt for the platform
        cppfilt_cmd = "c++filt"

        _LOGGER.info("Demangling %d symbols", len(symbols))
        _LOGGER.debug("objdump_path = %s", self.objdump_path)

        # Check if we have a toolchain-specific c++filt
        if self.objdump_path and self.objdump_path != "objdump":
            # Replace objdump with c++filt in the path
            potential_cppfilt = self.objdump_path.replace("objdump", "c++filt")
            _LOGGER.info("Checking for toolchain c++filt at: %s", potential_cppfilt)
            if Path(potential_cppfilt).exists():
                cppfilt_cmd = potential_cppfilt
                _LOGGER.info("✓ Using toolchain c++filt: %s", cppfilt_cmd)
            else:
                _LOGGER.info(
                    "✗ Toolchain c++filt not found at %s, using system c++filt",
                    potential_cppfilt,
                )
        else:
            _LOGGER.info("✗ Using system c++filt (objdump_path=%s)", self.objdump_path)

        # Strip GCC optimization suffixes and prefixes before demangling
        # Suffixes like $isra$0, $part$0, $constprop$0 confuse c++filt
        # Prefixes like _GLOBAL__sub_I_ need to be removed and tracked
        symbols_stripped: list[str] = []
        symbols_prefixes: list[str] = []  # Track removed prefixes
        for symbol in symbols:
            # Remove GCC optimization markers
            stripped = _GCC_OPTIMIZATION_SUFFIX_PATTERN.sub("", symbol)

            # Handle GCC global constructor/initializer prefixes
            # _GLOBAL__sub_I_<mangled> -> extract <mangled> for demangling
            prefix = ""
            for gcc_prefix in _GCC_PREFIX_ANNOTATIONS:
                if stripped.startswith(gcc_prefix):
                    prefix = gcc_prefix
                    stripped = stripped[len(prefix) :]
                    break

            symbols_stripped.append(stripped)
            symbols_prefixes.append(prefix)

        try:
            # Send all symbols to c++filt at once
            result = subprocess.run(
                [cppfilt_cmd],
                input="\n".join(symbols_stripped),
                capture_output=True,
                text=True,
                check=False,
            )
        except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e:
            # On error, cache originals
            _LOGGER.warning("Failed to batch demangle symbols: %s", e)
            for symbol in symbols:
                self._demangle_cache[symbol] = symbol
            return

        if result.returncode != 0:
            _LOGGER.warning(
                "c++filt exited with code %d: %s",
                result.returncode,
                result.stderr[:200] if result.stderr else "(no error output)",
            )
            # Cache originals on failure
            for symbol in symbols:
                self._demangle_cache[symbol] = symbol
            return

        # Process demangled output
        self._process_demangled_output(
            symbols, symbols_stripped, symbols_prefixes, result.stdout, cppfilt_cmd
        )

    def _process_demangled_output(
        self,
        symbols: list[str],
        symbols_stripped: list[str],
        symbols_prefixes: list[str],
        demangled_output: str,
        cppfilt_cmd: str,
    ) -> None:
        """Process demangled symbol output and populate cache.

        Args:
            symbols: Original symbol names
            symbols_stripped: Stripped symbol names sent to c++filt
            symbols_prefixes: Removed prefixes to restore
            demangled_output: Output from c++filt
            cppfilt_cmd: Path to c++filt command (for logging)
        """
        demangled_lines = demangled_output.strip().split("\n")
        failed_count = 0

        for original, stripped, prefix, demangled in zip(
            symbols, symbols_stripped, symbols_prefixes, demangled_lines
        ):
            # Add back any prefix that was removed
            demangled = self._restore_symbol_prefix(prefix, stripped, demangled)

            # If we stripped a suffix, add it back to the demangled name for clarity
            if original != stripped and not prefix:
                demangled = self._restore_symbol_suffix(original, demangled)

            self._demangle_cache[original] = demangled

            # Log symbols that failed to demangle (stayed the same as stripped version)
            if stripped == demangled and stripped.startswith("_Z"):
                failed_count += 1
                if failed_count <= 5:  # Only log first 5 failures
                    _LOGGER.warning("Failed to demangle: %s", original)

        if failed_count == 0:
            _LOGGER.info("Successfully demangled all %d symbols", len(symbols))
            return

        _LOGGER.warning(
            "Failed to demangle %d/%d symbols using %s",
            failed_count,
            len(symbols),
            cppfilt_cmd,
        )

    @staticmethod
    def _restore_symbol_prefix(prefix: str, stripped: str, demangled: str) -> str:
        """Restore prefix that was removed before demangling.

        Args:
            prefix: Prefix that was removed (e.g., "_GLOBAL__sub_I_")
            stripped: Stripped symbol name
            demangled: Demangled symbol name

        Returns:
            Demangled name with prefix restored/annotated
        """
        if not prefix:
            return demangled

        # Successfully demangled - add descriptive prefix
        if demangled != stripped and (
            annotation := _GCC_PREFIX_ANNOTATIONS.get(prefix)
        ):
            return f"[{annotation}: {demangled}]"

        # Failed to demangle - restore original prefix
        return prefix + demangled

    @staticmethod
    def _restore_symbol_suffix(original: str, demangled: str) -> str:
        """Restore GCC optimization suffix that was removed before demangling.

        Args:
            original: Original symbol name with suffix
            demangled: Demangled symbol name without suffix

        Returns:
            Demangled name with suffix annotation
        """
        if suffix_match := _GCC_OPTIMIZATION_SUFFIX_PATTERN.search(original):
            return f"{demangled} [{suffix_match.group(1)}]"
        return demangled

    def _demangle_symbol(self, symbol: str) -> str:
        """Get demangled C++ symbol name from cache."""
        return self._demangle_cache.get(symbol, symbol)

    def _categorize_esphome_core_symbol(self, demangled: str) -> str:
        """Categorize ESPHome core symbols into subcategories."""
        # Special patterns that need to be checked separately
        if any(pattern in demangled for pattern in _CPP_RUNTIME_PATTERNS):
            return "C++ Runtime (vtables/RTTI)"

        if demangled.startswith(_NAMESPACE_STD):
            return "C++ STL"

        # Check against patterns from const.py
        for category, patterns in CORE_SUBCATEGORY_PATTERNS.items():
            if any(pattern in demangled for pattern in patterns):
                return category

        return "Other Core"


if __name__ == "__main__":
    from .cli import main

    main()
```
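A minimal usage sketch of the analyzer above; the ELF path is a placeholder, and objdump/readelf fall back to the system binaries when no PlatformIO idedata is passed:

```python
from esphome.analyze_memory import MemoryAnalyzer

# Placeholder path: point this at a compiled ESPHome firmware ELF.
analyzer = MemoryAnalyzer(".esphome/build/demo/.pioenvs/demo/firmware.elf")
components = analyzer.analyze()  # dict[str, ComponentMemory]

top = sorted(components.items(), key=lambda kv: kv[1].flash_total, reverse=True)[:5]
for name, mem in top:
    print(f"{name}: flash={mem.flash_total:,} B, ram={mem.ram_total:,} B")
```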
esphome/analyze_memory/__main__.py · new file, 6 lines

```python
"""Main entry point for running the memory analyzer as a module."""

from .cli import main

if __name__ == "__main__":
    main()
```
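With this entry point, `python -m esphome.analyze_memory` dispatches straight to `cli.main()`; the accepted arguments are defined in cli.py below.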
esphome/analyze_memory/cli.py · new file, 408 lines

```python
"""CLI interface for memory analysis with report generation."""

from collections import defaultdict
import sys

from . import (
    _COMPONENT_API,
    _COMPONENT_CORE,
    _COMPONENT_PREFIX_ESPHOME,
    _COMPONENT_PREFIX_EXTERNAL,
    MemoryAnalyzer,
)


class MemoryAnalyzerCLI(MemoryAnalyzer):
    """Memory analyzer with CLI-specific report generation."""

    # Column width constants
    COL_COMPONENT: int = 29
    COL_FLASH_TEXT: int = 14
    COL_FLASH_DATA: int = 14
    COL_RAM_DATA: int = 12
    COL_RAM_BSS: int = 12
    COL_TOTAL_FLASH: int = 15
    COL_TOTAL_RAM: int = 12
    COL_SEPARATOR: int = 3  # " | "

    # Core analysis column widths
    COL_CORE_SUBCATEGORY: int = 30
    COL_CORE_SIZE: int = 12
    COL_CORE_COUNT: int = 6
    COL_CORE_PERCENT: int = 10

    # Calculate table width once at class level
    TABLE_WIDTH: int = (
        COL_COMPONENT
        + COL_SEPARATOR
        + COL_FLASH_TEXT
        + COL_SEPARATOR
        + COL_FLASH_DATA
        + COL_SEPARATOR
        + COL_RAM_DATA
        + COL_SEPARATOR
        + COL_RAM_BSS
        + COL_SEPARATOR
        + COL_TOTAL_FLASH
        + COL_SEPARATOR
        + COL_TOTAL_RAM
    )

    @staticmethod
    def _make_separator_line(*widths: int) -> str:
        """Create a separator line with given column widths.

        Args:
            widths: Column widths to create separators for

        Returns:
            Separator line like "----+---------+-----"
        """
        return "-+-".join("-" * width for width in widths)

    # Pre-computed separator lines
    MAIN_TABLE_SEPARATOR: str = _make_separator_line(
        COL_COMPONENT,
        COL_FLASH_TEXT,
        COL_FLASH_DATA,
        COL_RAM_DATA,
        COL_RAM_BSS,
        COL_TOTAL_FLASH,
        COL_TOTAL_RAM,
    )

    CORE_TABLE_SEPARATOR: str = _make_separator_line(
        COL_CORE_SUBCATEGORY,
        COL_CORE_SIZE,
        COL_CORE_COUNT,
        COL_CORE_PERCENT,
    )

    def generate_report(self, detailed: bool = False) -> str:
        """Generate a formatted memory report."""
        components = sorted(
            self.components.items(), key=lambda x: x[1].flash_total, reverse=True
        )

        # Calculate totals
        total_flash = sum(c.flash_total for _, c in components)
        total_ram = sum(c.ram_total for _, c in components)

        # Build report
        lines: list[str] = []

        lines.append("=" * self.TABLE_WIDTH)
        lines.append("Component Memory Analysis".center(self.TABLE_WIDTH))
        lines.append("=" * self.TABLE_WIDTH)
        lines.append("")

        # Main table - fixed column widths
        lines.append(
            f"{'Component':<{self.COL_COMPONENT}} | {'Flash (text)':>{self.COL_FLASH_TEXT}} | {'Flash (data)':>{self.COL_FLASH_DATA}} | {'RAM (data)':>{self.COL_RAM_DATA}} | {'RAM (bss)':>{self.COL_RAM_BSS}} | {'Total Flash':>{self.COL_TOTAL_FLASH}} | {'Total RAM':>{self.COL_TOTAL_RAM}}"
        )
        lines.append(self.MAIN_TABLE_SEPARATOR)

        for name, mem in components:
            if mem.flash_total > 0 or mem.ram_total > 0:
                flash_rodata = mem.rodata_size + mem.data_size
                lines.append(
                    f"{name:<{self.COL_COMPONENT}} | {mem.text_size:>{self.COL_FLASH_TEXT - 2},} B | {flash_rodata:>{self.COL_FLASH_DATA - 2},} B | "
                    f"{mem.data_size:>{self.COL_RAM_DATA - 2},} B | {mem.bss_size:>{self.COL_RAM_BSS - 2},} B | "
                    f"{mem.flash_total:>{self.COL_TOTAL_FLASH - 2},} B | {mem.ram_total:>{self.COL_TOTAL_RAM - 2},} B"
                )

        lines.append(self.MAIN_TABLE_SEPARATOR)
        lines.append(
            f"{'TOTAL':<{self.COL_COMPONENT}} | {' ':>{self.COL_FLASH_TEXT}} | {' ':>{self.COL_FLASH_DATA}} | "
            f"{' ':>{self.COL_RAM_DATA}} | {' ':>{self.COL_RAM_BSS}} | "
            f"{total_flash:>{self.COL_TOTAL_FLASH - 2},} B | {total_ram:>{self.COL_TOTAL_RAM - 2},} B"
        )

        # Top consumers
        lines.append("")
        lines.append("Top Flash Consumers:")
        for i, (name, mem) in enumerate(components[:25]):
            if mem.flash_total > 0:
                percentage = (
                    (mem.flash_total / total_flash * 100) if total_flash > 0 else 0
                )
                lines.append(
                    f"{i + 1}. {name} ({mem.flash_total:,} B) - {percentage:.1f}% of analyzed flash"
                )

        lines.append("")
        lines.append("Top RAM Consumers:")
        ram_components = sorted(components, key=lambda x: x[1].ram_total, reverse=True)
        for i, (name, mem) in enumerate(ram_components[:25]):
            if mem.ram_total > 0:
                percentage = (mem.ram_total / total_ram * 100) if total_ram > 0 else 0
                lines.append(
                    f"{i + 1}. {name} ({mem.ram_total:,} B) - {percentage:.1f}% of analyzed RAM"
                )

        lines.append("")
        lines.append(
            "Note: This analysis covers symbols in the ELF file. Some runtime allocations may not be included."
        )
        lines.append("=" * self.TABLE_WIDTH)

        # Add ESPHome core detailed analysis if there are core symbols
        if self._esphome_core_symbols:
            lines.append("")
            lines.append("=" * self.TABLE_WIDTH)
            lines.append(
                f"{_COMPONENT_CORE} Detailed Analysis".center(self.TABLE_WIDTH)
            )
            lines.append("=" * self.TABLE_WIDTH)
            lines.append("")

            # Group core symbols by subcategory
            core_subcategories: dict[str, list[tuple[str, str, int]]] = defaultdict(
                list
            )

            for symbol, demangled, size in self._esphome_core_symbols:
                # Categorize based on demangled name patterns
                subcategory = self._categorize_esphome_core_symbol(demangled)
                core_subcategories[subcategory].append((symbol, demangled, size))

            # Sort subcategories by total size
            sorted_subcategories = sorted(
                [
                    (name, symbols, sum(s[2] for s in symbols))
                    for name, symbols in core_subcategories.items()
                ],
                key=lambda x: x[2],
                reverse=True,
            )

            lines.append(
                f"{'Subcategory':<{self.COL_CORE_SUBCATEGORY}} | {'Size':>{self.COL_CORE_SIZE}} | "
                f"{'Count':>{self.COL_CORE_COUNT}} | {'% of Core':>{self.COL_CORE_PERCENT}}"
            )
            lines.append(self.CORE_TABLE_SEPARATOR)

            core_total = sum(size for _, _, size in self._esphome_core_symbols)

            for subcategory, symbols, total_size in sorted_subcategories:
                percentage = (total_size / core_total * 100) if core_total > 0 else 0
                lines.append(
                    f"{subcategory:<{self.COL_CORE_SUBCATEGORY}} | {total_size:>{self.COL_CORE_SIZE - 2},} B | "
                    f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%"
                )

            # Top 15 largest core symbols
            lines.append("")
            lines.append(f"Top 15 Largest {_COMPONENT_CORE} Symbols:")
            sorted_core_symbols = sorted(
                self._esphome_core_symbols, key=lambda x: x[2], reverse=True
            )

            for i, (symbol, demangled, size) in enumerate(sorted_core_symbols[:15]):
                lines.append(f"{i + 1}. {demangled} ({size:,} B)")

            lines.append("=" * self.TABLE_WIDTH)

        # Add detailed analysis for top ESPHome and external components
        esphome_components = [
            (name, mem)
            for name, mem in components
            if name.startswith(_COMPONENT_PREFIX_ESPHOME) and name != _COMPONENT_CORE
        ]
        external_components = [
            (name, mem)
            for name, mem in components
            if name.startswith(_COMPONENT_PREFIX_EXTERNAL)
        ]

        top_esphome_components = sorted(
            esphome_components, key=lambda x: x[1].flash_total, reverse=True
        )[:30]

        # Include all external components (they're usually important)
        top_external_components = sorted(
            external_components, key=lambda x: x[1].flash_total, reverse=True
        )

        # Check if API component exists and ensure it's included
        api_component = None
        for name, mem in components:
            if name == _COMPONENT_API:
                api_component = (name, mem)
                break

        # Combine all components to analyze: top ESPHome + all external + API if not already included
        components_to_analyze = list(top_esphome_components) + list(
            top_external_components
        )
        if api_component and api_component not in components_to_analyze:
            components_to_analyze.append(api_component)

        if components_to_analyze:
            for comp_name, comp_mem in components_to_analyze:
                if not (comp_symbols := self._component_symbols.get(comp_name, [])):
                    continue
                lines.append("")
                lines.append("=" * self.TABLE_WIDTH)
                lines.append(f"{comp_name} Detailed Analysis".center(self.TABLE_WIDTH))
                lines.append("=" * self.TABLE_WIDTH)
                lines.append("")

                # Sort symbols by size
                sorted_symbols = sorted(comp_symbols, key=lambda x: x[2], reverse=True)

                lines.append(f"Total symbols: {len(sorted_symbols)}")
                lines.append(f"Total size: {comp_mem.flash_total:,} B")
                lines.append("")

                # Show all symbols > 100 bytes for better visibility
                large_symbols = [
                    (sym, dem, size) for sym, dem, size in sorted_symbols if size > 100
                ]

                lines.append(
                    f"{comp_name} Symbols > 100 B ({len(large_symbols)} symbols):"
                )
                for i, (symbol, demangled, size) in enumerate(large_symbols):
                    lines.append(f"{i + 1}. {demangled} ({size:,} B)")

                lines.append("=" * self.TABLE_WIDTH)

        return "\n".join(lines)

    def dump_uncategorized_symbols(self, output_file: str | None = None) -> None:
        """Dump uncategorized symbols for analysis."""
        # Sort by size descending
        sorted_symbols = sorted(
            self._uncategorized_symbols, key=lambda x: x[2], reverse=True
        )
```
|
||||
)
|
||||
|
||||
lines = ["Uncategorized Symbols Analysis", "=" * 80]
|
||||
lines.append(f"Total uncategorized symbols: {len(sorted_symbols)}")
|
||||
lines.append(
|
||||
f"Total uncategorized size: {sum(s[2] for s in sorted_symbols):,} bytes"
|
||||
)
|
||||
lines.append("")
|
||||
lines.append(f"{'Size':>10} | {'Symbol':<60} | Demangled")
|
||||
lines.append("-" * 10 + "-+-" + "-" * 60 + "-+-" + "-" * 40)
|
||||
|
||||
for symbol, demangled, size in sorted_symbols[:100]: # Top 100
|
||||
demangled_display = (
|
||||
demangled[:100] if symbol != demangled else "[not demangled]"
|
||||
)
|
||||
lines.append(f"{size:>10,} | {symbol[:60]:<60} | {demangled_display}")
|
||||
|
||||
if len(sorted_symbols) > 100:
|
||||
lines.append(f"\n... and {len(sorted_symbols) - 100} more symbols")
|
||||
|
||||
content = "\n".join(lines)
|
||||
|
||||
if output_file:
|
||||
with open(output_file, "w", encoding="utf-8") as f:
|
||||
f.write(content)
|
||||
else:
|
||||
print(content)
|
||||
|
||||
|
||||
def analyze_elf(
|
||||
elf_path: str,
|
||||
objdump_path: str | None = None,
|
||||
readelf_path: str | None = None,
|
||||
detailed: bool = False,
|
||||
external_components: set[str] | None = None,
|
||||
) -> str:
|
||||
"""Analyze an ELF file and return a memory report."""
|
||||
analyzer = MemoryAnalyzerCLI(
|
||||
elf_path, objdump_path, readelf_path, external_components
|
||||
)
|
||||
analyzer.analyze()
|
||||
return analyzer.generate_report(detailed)
|
||||
|
||||
|
||||
def main():
|
||||
"""CLI entrypoint for memory analysis."""
|
||||
if len(sys.argv) < 2:
|
||||
print("Usage: python -m esphome.analyze_memory <build_directory>")
|
||||
print("\nAnalyze memory usage from an ESPHome build directory.")
|
||||
print("The build directory should contain firmware.elf and idedata will be")
|
||||
print("loaded from ~/.esphome/.internal/idedata/<device>.json")
|
||||
print("\nExamples:")
|
||||
print(" python -m esphome.analyze_memory ~/.esphome/build/my-device")
|
||||
print(" python -m esphome.analyze_memory .esphome/build/my-device")
|
||||
print(" python -m esphome.analyze_memory my-device # Short form")
|
||||
sys.exit(1)
|
||||
|
||||
build_dir = sys.argv[1]
|
||||
|
||||
# Load build directory
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
from esphome.platformio_api import IDEData
|
||||
|
||||
build_path = Path(build_dir)
|
||||
|
||||
# If no path separator in name, assume it's a device name
|
||||
if "/" not in build_dir and not build_path.is_dir():
|
||||
# Try current directory first
|
||||
cwd_path = Path.cwd() / ".esphome" / "build" / build_dir
|
||||
if cwd_path.is_dir():
|
||||
build_path = cwd_path
|
||||
print(f"Using build directory: {build_path}", file=sys.stderr)
|
||||
else:
|
||||
# Fall back to home directory
|
||||
build_path = Path.home() / ".esphome" / "build" / build_dir
|
||||
print(f"Using build directory: {build_path}", file=sys.stderr)
|
||||
|
||||
if not build_path.is_dir():
|
||||
print(f"Error: {build_path} is not a directory", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Find firmware.elf
|
||||
elf_file = None
|
||||
for elf_candidate in [
|
||||
build_path / "firmware.elf",
|
||||
build_path / ".pioenvs" / build_path.name / "firmware.elf",
|
||||
]:
|
||||
if elf_candidate.exists():
|
||||
elf_file = str(elf_candidate)
|
||||
break
|
||||
|
||||
if not elf_file:
|
||||
print(f"Error: firmware.elf not found in {build_dir}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Find idedata.json - check current directory first, then home
|
||||
device_name = build_path.name
|
||||
idedata_candidates = [
|
||||
Path.cwd() / ".esphome" / "idedata" / f"{device_name}.json",
|
||||
Path.home() / ".esphome" / "idedata" / f"{device_name}.json",
|
||||
]
|
||||
|
||||
idedata = None
|
||||
for idedata_path in idedata_candidates:
|
||||
if not idedata_path.exists():
|
||||
continue
|
||||
try:
|
||||
with open(idedata_path, encoding="utf-8") as f:
|
||||
raw_data = json.load(f)
|
||||
idedata = IDEData(raw_data)
|
||||
print(f"Loaded idedata from: {idedata_path}", file=sys.stderr)
|
||||
break
|
||||
except (json.JSONDecodeError, OSError) as e:
|
||||
print(f"Warning: Failed to load idedata: {e}", file=sys.stderr)
|
||||
|
||||
if not idedata:
|
||||
print(
|
||||
f"Warning: idedata not found (searched {idedata_candidates[0]} and {idedata_candidates[1]})",
|
||||
file=sys.stderr,
|
||||
)
|
||||
|
||||
analyzer = MemoryAnalyzerCLI(elf_file, idedata=idedata)
|
||||
analyzer.analyze()
|
||||
report = analyzer.generate_report()
|
||||
print(report)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
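
The same analysis is also available programmatically via analyze_elf() defined
above; a minimal usage sketch (the ELF path is a placeholder, not from the diff):

# report = analyze_elf("/path/to/firmware.elf", detailed=True)
# print(report)
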
esphome/analyze_memory/const.py (new file, 903 lines)
@@ -0,0 +1,903 @@
"""Constants for memory analysis symbol pattern matching."""

import re

# Pattern to extract ESPHome component namespaces dynamically
ESPHOME_COMPONENT_PATTERN = re.compile(r"esphome::([a-zA-Z0-9_]+)::")

# Section mapping for ELF file sections
# Maps standard section names to their various platform-specific variants
SECTION_MAPPING = {
    ".text": frozenset([".text", ".iram"]),
    ".rodata": frozenset([".rodata"]),
    ".data": frozenset([".data", ".dram"]),
    ".bss": frozenset([".bss"]),
}

# Section to ComponentMemory attribute mapping
# Maps section names to the attribute name in ComponentMemory dataclass
SECTION_TO_ATTR = {
    ".text": "text_size",
    ".rodata": "rodata_size",
    ".data": "data_size",
    ".bss": "bss_size",
}

# Component identification rules
# Symbol patterns: patterns found in raw symbol names
SYMBOL_PATTERNS = {
    "freertos": [
        "vTask",
        "xTask",
        "xQueue",
        "pvPort",
        "vPort",
        "uxTask",
        "pcTask",
        "prvTimerTask",
        "prvAddNewTaskToReadyList",
        "pxReadyTasksLists",
        "prvAddCurrentTaskToDelayedList",
        "xEventGroupWaitBits",
        "xRingbufferSendFromISR",
        "prvSendItemDoneNoSplit",
        "prvReceiveGeneric",
        "prvSendAcquireGeneric",
        "prvCopyItemAllowSplit",
        "xEventGroup",
        "xRingbuffer",
        "prvSend",
        "prvReceive",
        "prvCopy",
        "xPort",
        "ulTaskGenericNotifyTake",
        "prvIdleTask",
        "prvInitialiseNewTask",
        "prvIsYieldRequiredSMP",
        "prvGetItemByteBuf",
        "prvInitializeNewRingbuffer",
        "prvAcquireItemNoSplit",
        "prvNotifyQueueSetContainer",
        "ucStaticTimerQueueStorage",
        "eTaskGetState",
        "main_task",
        "do_system_init_fn",
        "xSemaphoreCreateGenericWithCaps",
        "vListInsert",
        "uxListRemove",
        "vRingbufferReturnItem",
        "vRingbufferReturnItemFromISR",
        "prvCheckItemFitsByteBuffer",
        "prvGetCurMaxSizeAllowSplit",
        "tick_hook",
        "sys_sem_new",
        "sys_arch_mbox_fetch",
        "sys_arch_sem_wait",
        "prvDeleteTCB",
        "vQueueDeleteWithCaps",
        "vRingbufferDeleteWithCaps",
        "vSemaphoreDeleteWithCaps",
        "prvCheckItemAvail",
        "prvCheckTaskCanBeScheduledSMP",
        "prvGetCurMaxSizeNoSplit",
        "prvResetNextTaskUnblockTime",
        "prvReturnItemByteBuf",
        "vApplicationStackOverflowHook",
        "vApplicationGetIdleTaskMemory",
        "sys_init",
        "sys_mbox_new",
        "sys_arch_mbox_tryfetch",
    ],
    "xtensa": ["xt_", "_xt_", "xPortEnterCriticalTimeout"],
    "heap": ["heap_", "multi_heap"],
    "spi_flash": ["spi_flash"],
    "rtc": ["rtc_", "rtcio_ll_"],
    "gpio_driver": ["gpio_", "pins"],
    "uart_driver": ["uart", "_uart", "UART"],
    "timer": ["timer_", "esp_timer"],
    "peripherals": ["periph_", "periman"],
    "network_stack": [
        "vj_compress",
        "raw_sendto",
        "raw_input",
        "etharp_",
        "icmp_input",
        "socket_ipv6",
        "ip_napt",
        "socket_ipv4_multicast",
        "socket_ipv6_multicast",
        "netconn_",
        "recv_raw",
        "accept_function",
        "netconn_recv_data",
        "netconn_accept",
        "netconn_write_vectors_partly",
        "netconn_drain",
        "raw_connect",
        "raw_bind",
        "icmp_send_response",
        "sockets",
        "icmp_dest_unreach",
        "inet_chksum_pseudo",
        "alloc_socket",
        "done_socket",
        "set_global_fd_sets",
        "inet_chksum_pbuf",
        "tryget_socket_unconn_locked",
        "tryget_socket_unconn",
        "cs_create_ctrl_sock",
        "netbuf_alloc",
    ],
    "ipv6_stack": ["nd6_", "ip6_", "mld6_", "icmp6_", "icmp6_input"],
    "wifi_stack": [
        "ieee80211",
        "hostap",
        "sta_",
        "ap_",
        "scan_",
        "wifi_",
        "wpa_",
        "wps_",
        "esp_wifi",
        "cnx_",
        "wpa3_",
        "sae_",
        "wDev_",
        "ic_",
        "mac_",
        "esf_buf",
        "gWpaSm",
        "sm_WPA",
        "eapol_",
        "owe_",
        "wifiLowLevelInit",
        "s_do_mapping",
        "gScanStruct",
        "ppSearchTxframe",
        "ppMapWaitTxq",
        "ppFillAMPDUBar",
        "ppCheckTxConnTrafficIdle",
        "ppCalTkipMic",
    ],
    "bluetooth": ["bt_", "ble_", "l2c_", "gatt_", "gap_", "hci_", "BT_init"],
    "wifi_bt_coex": ["coex"],
    "bluetooth_rom": ["r_ble", "r_lld", "r_llc", "r_llm"],
    "bluedroid_bt": [
        "bluedroid",
        "btc_",
        "bta_",
        "btm_",
        "btu_",
        "BTM_",
        "GATT",
        "L2CA_",
        "smp_",
        "gatts_",
        "attp_",
        "l2cu_",
        "l2cb",
        "smp_cb",
        "BTA_GATTC_",
        "SMP_",
        "BTU_",
        "BTA_Dm",
        "GAP_Ble",
        "BT_tx_if",
        "host_recv_pkt_cb",
        "saved_local_oob_data",
        "string_to_bdaddr",
        "string_is_bdaddr",
        "CalConnectParamTimeout",
        "transmit_fragment",
        "transmit_data",
        "event_command_ready",
        "read_command_complete_header",
        "parse_read_local_extended_features_response",
        "parse_read_local_version_info_response",
        "should_request_high",
        "btdm_wakeup_request",
        "BTA_SetAttributeValue",
        "BTA_EnableBluetooth",
        "transmit_command_futured",
        "transmit_command",
        "get_waiting_command",
        "make_command",
        "transmit_downward",
        "host_recv_adv_packet",
        "copy_extra_byte_in_db",
        "parse_read_local_supported_commands_response",
    ],
    "crypto_math": [
        "ecp_",
        "bignum_",
        "mpi_",
        "sswu",
        "modp",
        "dragonfly_",
        "gcm_mult",
        "__multiply",
        "quorem",
        "__mdiff",
        "__lshift",
        "__mprec_tens",
        "ECC_",
        "multiprecision_",
        "mix_sub_columns",
        "sbox",
        "gfm2_sbox",
        "gfm3_sbox",
        "curve_p256",
        "curve",
        "p_256_init_curve",
        "shift_sub_rows",
        "rshift",
    ],
    "hw_crypto": ["esp_aes", "esp_sha", "esp_rsa", "esp_bignum", "esp_mpi"],
    "libc": [
        "printf",
        "scanf",
        "malloc",
        "free",
        "memcpy",
        "memset",
        "strcpy",
        "strlen",
        "_dtoa",
        "_fopen",
        "__sfvwrite_r",
        "qsort",
        "__sf",
        "__sflush_r",
        "__srefill_r",
        "_impure_data",
        "_reclaim_reent",
        "_open_r",
        "strncpy",
        "_strtod_l",
        "__gethex",
        "__hexnan",
        "_setenv_r",
        "_tzset_unlocked_r",
        "__tzcalc_limits",
        "select",
        "scalbnf",
        "strtof",
        "strtof_l",
        "__d2b",
        "__b2d",
        "__s2b",
        "_Balloc",
        "__multadd",
        "__lo0bits",
        "__atexit0",
        "__smakebuf_r",
        "__swhatbuf_r",
        "_sungetc_r",
        "_close_r",
        "_link_r",
        "_unsetenv_r",
        "_rename_r",
        "__month_lengths",
        "tzinfo",
        "__ratio",
        "__hi0bits",
        "__ulp",
        "__any_on",
        "__copybits",
        "L_shift",
        "_fcntl_r",
        "_lseek_r",
        "_read_r",
        "_write_r",
        "_unlink_r",
        "_fstat_r",
        "access",
        "fsync",
        "tcsetattr",
        "tcgetattr",
        "tcflush",
        "tcdrain",
        "__ssrefill_r",
        "_stat_r",
        "__hexdig_fun",
        "__mcmp",
        "_fwalk_sglue",
        "__fpclassifyf",
        "_setlocale_r",
        "_mbrtowc_r",
        "fcntl",
        "__match",
        "_lock_close",
        "__c$",
        "__func__$",
        "__FUNCTION__$",
        "DAYS_IN_MONTH",
        "_DAYS_BEFORE_MONTH",
        "CSWTCH$",
        "dst$",
        "sulp",
    ],
    "string_ops": ["strcmp", "strncmp", "strchr", "strstr", "strtok", "strdup"],
    "memory_alloc": ["malloc", "calloc", "realloc", "free", "_sbrk"],
    "file_io": [
        "fread",
        "fwrite",
        "fopen",
        "fclose",
        "fseek",
        "ftell",
        "fflush",
        "s_fd_table",
    ],
    "string_formatting": [
        "snprintf",
        "vsnprintf",
        "sprintf",
        "vsprintf",
        "sscanf",
        "vsscanf",
    ],
    "cpp_anonymous": ["_GLOBAL__N_", "n$"],
    "cpp_runtime": ["__cxx", "_ZN", "_ZL", "_ZSt", "__gxx_personality", "_Z16"],
    "exception_handling": ["__cxa_", "_Unwind_", "__gcc_personality", "uw_frame_state"],
    "static_init": ["_GLOBAL__sub_I_"],
    "mdns_lib": ["mdns"],
    "phy_radio": [
        "phy_",
        "rf_",
        "chip_",
        "register_chipv7",
        "pbus_",
        "bb_",
        "fe_",
        "rfcal_",
        "ram_rfcal",
        "tx_pwctrl",
        "rx_chan",
        "set_rx_gain",
        "set_chan",
        "agc_reg",
        "ram_txiq",
        "ram_txdc",
        "ram_gen_rx_gain",
        "rx_11b_opt",
        "set_rx_sense",
        "set_rx_gain_cal",
        "set_chan_dig_gain",
        "tx_pwctrl_init_cal",
        "rfcal_txiq",
        "set_tx_gain_table",
        "correct_rfpll_offset",
        "pll_correct_dcap",
        "txiq_cal_init",
        "pwdet_sar",
        "pwdet_sar2_init",
        "ram_iq_est_enable",
        "ram_rfpll_set_freq",
        "ant_wifirx_cfg",
        "ant_btrx_cfg",
        "force_txrxoff",
        "force_txrx_off",
        "tx_paon_set",
        "opt_11b_resart",
        "rfpll_1p2_opt",
        "ram_dc_iq_est",
        "ram_start_tx_tone",
        "ram_en_pwdet",
        "ram_cbw2040_cfg",
        "rxdc_est_min",
        "i2cmst_reg_init",
        "temprature_sens_read",
        "ram_restart_cal",
        "ram_write_gain_mem",
        "ram_wait_rfpll_cal_end",
        "txcal_debuge_mode",
        "ant_wifitx_cfg",
        "reg_init_begin",
    ],
    "wifi_phy_pp": ["pp_", "ppT", "ppR", "ppP", "ppInstall", "ppCalTxAMPDULength"],
    "wifi_lmac": ["lmac"],
    "wifi_device": ["wdev", "wDev_"],
    "power_mgmt": [
        "pm_",
        "sleep",
        "rtc_sleep",
        "light_sleep",
        "deep_sleep",
        "power_down",
        "g_pm",
    ],
    "memory_mgmt": [
        "mem_",
        "memory_",
        "tlsf_",
        "memp_",
        "pbuf_",
        "pbuf_alloc",
        "pbuf_copy_partial_pbuf",
    ],
    "hal_layer": ["hal_"],
    "clock_mgmt": [
        "clk_",
        "clock_",
        "rtc_clk",
        "apb_",
        "cpu_freq",
        "setCpuFrequencyMhz",
    ],
    "cache_mgmt": ["cache"],
    "flash_ops": ["flash", "image_load"],
    "interrupt_handlers": [
        "isr",
        "interrupt",
        "intr_",
        "exc_",
        "exception",
        "port_IntStack",
    ],
    "wrapper_functions": ["_wrapper"],
    "error_handling": ["panic", "abort", "assert", "error_", "fault"],
    "authentication": ["auth"],
    "ppp_protocol": ["ppp", "ipcp_", "lcp_", "chap_", "LcpEchoCheck"],
    "dhcp": ["dhcp", "handle_dhcp"],
    "ethernet_phy": [
        "emac_",
        "eth_phy_",
        "phy_tlk110",
        "phy_lan87",
        "phy_ip101",
        "phy_rtl",
        "phy_dp83",
        "phy_ksz",
        "lan87xx_",
        "rtl8201_",
        "ip101_",
        "ksz80xx_",
        "jl1101_",
        "dp83848_",
        "eth_on_state_changed",
    ],
    "threading": ["pthread_", "thread_", "_task_"],
    "pthread": ["pthread"],
    "synchronization": ["mutex", "semaphore", "spinlock", "portMUX"],
    "math_lib": [
        "sin",
        "cos",
        "tan",
        "sqrt",
        "pow",
        "exp",
        "log",
        "atan",
        "asin",
        "acos",
        "floor",
        "ceil",
        "fabs",
        "round",
    ],
    "random": ["rand", "random", "rng_", "prng"],
    "time_lib": [
        "time",
        "clock",
        "gettimeofday",
        "settimeofday",
        "localtime",
        "gmtime",
        "mktime",
        "strftime",
    ],
    "console_io": ["console_", "uart_tx", "uart_rx", "puts", "putchar", "getchar"],
    "rom_functions": ["r_", "rom_"],
    "compiler_runtime": [
        "__divdi3",
        "__udivdi3",
        "__moddi3",
        "__muldi3",
        "__ashldi3",
        "__ashrdi3",
        "__lshrdi3",
        "__cmpdi2",
        "__fixdfdi",
        "__floatdidf",
    ],
    "libgcc": ["libgcc", "_divdi3", "_udivdi3"],
    "boot_startup": ["boot", "start_cpu", "call_start", "startup", "bootloader"],
    "bootloader": ["bootloader_", "esp_bootloader"],
    "app_framework": ["app_", "initArduino", "setup", "loop", "Update"],
    "weak_symbols": ["__weak_"],
    "compiler_builtins": ["__builtin_"],
    "vfs": ["vfs_", "VFS"],
    "esp32_sdk": ["esp32_", "esp32c", "esp32s"],
    "usb": ["usb_", "USB", "cdc_", "CDC"],
    "i2c_driver": ["i2c_", "I2C"],
    "i2s_driver": ["i2s_", "I2S"],
    "spi_driver": ["spi_", "SPI"],
    "adc_driver": ["adc_", "ADC"],
    "dac_driver": ["dac_", "DAC"],
    "touch_driver": ["touch_", "TOUCH"],
    "pwm_driver": ["pwm_", "PWM", "ledc_", "LEDC"],
    "rmt_driver": ["rmt_", "RMT"],
    "pcnt_driver": ["pcnt_", "PCNT"],
    "can_driver": ["can_", "CAN", "twai_", "TWAI"],
    "sdmmc_driver": ["sdmmc_", "SDMMC", "sdcard", "sd_card"],
    "temp_sensor": ["temp_sensor", "tsens_"],
    "watchdog": ["wdt_", "WDT", "watchdog"],
    "brownout": ["brownout", "bod_"],
    "ulp": ["ulp_", "ULP"],
    "psram": ["psram", "PSRAM", "spiram", "SPIRAM"],
    "efuse": ["efuse", "EFUSE"],
    "partition": ["partition", "esp_partition"],
    "esp_event": ["esp_event", "event_loop", "event_callback"],
    "esp_console": ["esp_console", "console_"],
    "chip_specific": ["chip_", "esp_chip"],
    "esp_system_utils": ["esp_system", "esp_hw", "esp_clk", "esp_sleep"],
    "ipc": ["esp_ipc", "ipc_"],
    "wifi_config": [
        "g_cnxMgr",
        "gChmCxt",
        "g_ic",
        "TxRxCxt",
        "s_dp",
        "s_ni",
        "s_reg_dump",
        "packet$",
        "d_mult_table",
        "K",
        "fcstab",
    ],
    "smartconfig": ["sc_ack_send"],
    "rc_calibration": ["rc_cal", "rcUpdate"],
    "noise_floor": ["noise_check"],
    "rf_calibration": [
        "set_rx_sense",
        "set_rx_gain_cal",
        "set_chan_dig_gain",
        "tx_pwctrl_init_cal",
        "rfcal_txiq",
        "set_tx_gain_table",
        "correct_rfpll_offset",
        "pll_correct_dcap",
        "txiq_cal_init",
        "pwdet_sar",
        "rx_11b_opt",
    ],
    "wifi_crypto": [
        "pk_use_ecparams",
        "process_segments",
        "ccmp_",
        "rc4_",
        "aria_",
        "mgf_mask",
        "dh_group",
        "ccmp_aad_nonce",
        "ccmp_encrypt",
        "rc4_skip",
        "aria_sb1",
        "aria_sb2",
        "aria_is1",
        "aria_is2",
        "aria_sl",
        "aria_a",
    ],
    "radio_control": ["fsm_input", "fsm_sconfreq"],
    "pbuf": [
        "pbuf_",
    ],
    "event_group": ["xEventGroup"],
    "ringbuffer": ["xRingbuffer", "prvSend", "prvReceive", "prvCopy"],
    "provisioning": ["prov_", "prov_stop_and_notify"],
    "scan": ["gScanStruct"],
    "port": ["xPort"],
    "elf_loader": [
        "elf_add",
        "elf_add_note",
        "elf_add_segment",
        "process_image",
        "read_encoded",
        "read_encoded_value",
        "read_encoded_value_with_base",
        "process_image_header",
    ],
    "socket_api": [
        "sockets",
        "netconn_",
        "accept_function",
        "recv_raw",
        "socket_ipv4_multicast",
        "socket_ipv6_multicast",
    ],
    "igmp": ["igmp_", "igmp_send", "igmp_input"],
    "icmp6": ["icmp6_"],
    "arp": ["arp_table"],
    "ampdu": [
        "ampdu_",
        "rcAmpdu",
        "trc_onAmpduOp",
        "rcAmpduLowerRate",
        "ampdu_dispatch_upto",
    ],
    "ieee802_11": ["ieee802_11_", "ieee802_11_parse_elems"],
    "rate_control": ["rssi_margin", "rcGetSched", "get_rate_fcc_index"],
    "nan": ["nan_dp_", "nan_dp_post_tx", "nan_dp_delete_peer"],
    "channel_mgmt": ["chm_init", "chm_set_current_channel"],
    "trace": ["trc_init", "trc_onAmpduOp"],
    "country_code": ["country_info", "country_info_24ghz"],
    "multicore": ["do_multicore_settings"],
    "Update_lib": ["Update"],
    "stdio": [
        "__sf",
        "__sflush_r",
        "__srefill_r",
        "_impure_data",
        "_reclaim_reent",
        "_open_r",
    ],
    "strncpy_ops": ["strncpy"],
    "math_internal": ["__mdiff", "__lshift", "__mprec_tens", "quorem"],
    "character_class": ["__chclass"],
    "camellia": ["camellia_", "camellia_feistel"],
    "crypto_tables": ["FSb", "FSb2", "FSb3", "FSb4"],
    "event_buffer": ["g_eb_list_desc", "eb_space"],
    "base_node": ["base_node_", "base_node_add_handler"],
    "file_descriptor": ["s_fd_table"],
    "tx_delay": ["tx_delay_cfg"],
    "deinit": ["deinit_functions"],
    "lcp_echo": ["LcpEchoCheck"],
    "raw_api": ["raw_bind", "raw_connect"],
    "checksum": ["process_checksum"],
    "entry_management": ["add_entry"],
    "esp_ota": ["esp_ota", "ota_", "read_otadata"],
    "http_server": [
        "httpd_",
        "parse_url_char",
        "cb_headers_complete",
        "delete_entry",
        "validate_structure",
        "config_save",
        "config_new",
        "verify_url",
        "cb_url",
    ],
    "misc_system": [
        "alarm_cbs",
        "start_up",
        "tokens",
        "unhex",
        "osi_funcs_ro",
        "enum_function",
        "fragment_and_dispatch",
        "alarm_set",
        "osi_alarm_new",
        "config_set_string",
        "config_update_newest_section",
        "config_remove_key",
        "method_strings",
        "interop_match",
        "interop_database",
        "__state_table",
        "__action_table",
        "s_stub_table",
        "s_context",
        "s_mmu_ctx",
        "s_get_bus_mask",
        "hli_queue_put",
        "list_remove",
        "list_delete",
        "lock_acquire_generic",
        "is_vect_desc_usable",
        "io_mode_str",
        "__c$20233",
        "interface",
        "read_id_core",
        "subscribe_idle",
        "unsubscribe_idle",
        "s_clkout_handle",
        "lock_release_generic",
        "config_set_int",
        "config_get_int",
        "config_get_string",
        "config_has_key",
        "config_remove_section",
        "osi_alarm_init",
        "osi_alarm_deinit",
        "fixed_queue_enqueue",
        "fixed_queue_dequeue",
        "fixed_queue_new",
        "fixed_pkt_queue_enqueue",
        "fixed_pkt_queue_new",
        "list_append",
        "list_prepend",
        "list_insert_after",
        "list_contains",
        "list_get_node",
        "hash_function_blob",
        "cb_no_body",
        "cb_on_body",
        "profile_tab",
        "get_arg",
        "trim",
        "buf$",
        "process_appended_hash_and_sig$constprop$0",
        "uuidType",
        "allocate_svc_db_buf",
        "_hostname_is_ours",
        "s_hli_handlers",
        "tick_cb",
        "idle_cb",
        "input",
        "entry_find",
        "section_find",
        "find_bucket_entry_",
        "config_has_section",
        "hli_queue_create",
        "hli_queue_get",
        "hli_c_handler",
        "future_ready",
        "future_await",
        "future_new",
        "pkt_queue_enqueue",
        "pkt_queue_dequeue",
        "pkt_queue_cleanup",
        "pkt_queue_create",
        "pkt_queue_destroy",
        "fixed_pkt_queue_dequeue",
        "osi_alarm_cancel",
        "osi_alarm_is_active",
        "osi_sem_take",
        "osi_event_create",
        "osi_event_bind",
        "alarm_cb_handler",
        "list_foreach",
        "list_back",
        "list_front",
        "list_clear",
        "fixed_queue_try_peek_first",
        "translate_path",
        "get_idx",
        "find_key",
        "init",
        "end",
        "start",
        "set_read_value",
        "copy_address_list",
        "copy_and_key",
        "sdk_cfg_opts",
        "leftshift_onebit",
        "config_section_end",
        "config_section_begin",
        "find_entry_and_check_all_reset",
        "image_validate",
        "xPendingReadyList",
        "vListInitialise",
        "lock_init_generic",
        "ant_bttx_cfg",
        "ant_dft_cfg",
        "cs_send_to_ctrl_sock",
        "config_llc_util_funcs_reset",
        "make_set_adv_report_flow_control",
        "make_set_event_mask",
        "raw_new",
        "raw_remove",
        "BTE_InitStack",
        "parse_read_local_supported_features_response",
        "__math_invalidf",
        "tinytens",
        "__mprec_tinytens",
        "__mprec_bigtens",
        "vRingbufferDelete",
        "vRingbufferDeleteWithCaps",
        "vRingbufferReturnItem",
        "vRingbufferReturnItemFromISR",
        "get_acl_data_size_ble",
        "get_features_ble",
        "get_features_classic",
        "get_acl_packet_size_ble",
        "get_acl_packet_size_classic",
        "supports_extended_inquiry_response",
        "supports_rssi_with_inquiry_results",
        "supports_interlaced_inquiry_scan",
        "supports_reading_remote_extended_features",
    ],
    "bluetooth_ll": [
        "lld_pdu_",
        "ld_acl_",
        "lld_stop_ind_handler",
        "lld_evt_winsize_change",
        "config_lld_evt_funcs_reset",
        "config_lld_funcs_reset",
        "config_llm_funcs_reset",
        "llm_set_long_adv_data",
        "lld_retry_tx_prog",
        "llc_link_sup_to_ind_handler",
        "config_llc_funcs_reset",
        "lld_evt_rxwin_compute",
        "config_btdm_funcs_reset",
        "config_ea_funcs_reset",
        "llc_defalut_state_tab_reset",
        "config_rwip_funcs_reset",
        "ke_lmp_rx_flooding_detect",
    ],
}
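
# The tables above are consumed by plain substring matching. A minimal sketch
# (not part of this file) of how a raw symbol name would be categorized,
# assuming the analyzer walks SYMBOL_PATTERNS in order and takes the first hit:
#
#     def categorize_raw(symbol: str) -> str | None:
#         for component, patterns in SYMBOL_PATTERNS.items():
#             if any(p in symbol for p in patterns):
#                 return component
#         return None
#
#     categorize_raw("vTaskDelay")  # -> "freertos"
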
# Demangled patterns: patterns found in demangled C++ names
DEMANGLED_PATTERNS = {
    "gpio_driver": ["GPIO"],
    "uart_driver": ["UART"],
    "network_stack": [
        "lwip",
        "tcp",
        "udp",
        "ip4",
        "ip6",
        "dhcp",
        "dns",
        "netif",
        "ethernet",
        "ppp",
        "slip",
    ],
    "wifi_stack": ["NetworkInterface"],
    "nimble_bt": [
        "nimble",
        "NimBLE",
        "ble_hs",
        "ble_gap",
        "ble_gatt",
        "ble_att",
        "ble_l2cap",
        "ble_sm",
    ],
    "crypto": ["mbedtls", "crypto", "sha", "aes", "rsa", "ecc", "tls", "ssl"],
    "cpp_stdlib": ["std::", "__gnu_cxx::", "__cxxabiv"],
    "static_init": ["__static_initialization"],
    "rtti": ["__type_info", "__class_type_info"],
    "web_server_lib": ["AsyncWebServer", "AsyncWebHandler", "WebServer"],
    "async_tcp": ["AsyncClient", "AsyncServer"],
    "mdns_lib": ["mdns"],
    "json_lib": [
        "ArduinoJson",
        "JsonDocument",
        "JsonArray",
        "JsonObject",
        "deserialize",
        "serialize",
    ],
    "http_lib": ["HTTP", "http_", "Request", "Response", "Uri", "WebSocket"],
    "logging": ["log", "Log", "print", "Print", "diag_"],
    "authentication": ["checkDigestAuthentication"],
    "libgcc": ["libgcc"],
    "esp_system": ["esp_", "ESP"],
    "arduino": ["arduino"],
    "nvs": ["nvs_", "_ZTVN3nvs", "nvs::"],
    "filesystem": ["spiffs", "vfs"],
    "libc": ["newlib"],
}

# Patterns for categorizing ESPHome core symbols into subcategories
CORE_SUBCATEGORY_PATTERNS = {
    "Component Framework": ["Component"],
    "Application Core": ["Application"],
    "Scheduler": ["Scheduler"],
    "Component Iterator": ["ComponentIterator"],
    "Helper Functions": ["Helpers", "helpers"],
    "Preferences/Storage": ["Preferences", "ESPPreferences"],
    "I/O Utilities": ["HighFrequencyLoopRequester"],
    "String Utilities": ["str_"],
    "Bit Utilities": ["reverse_bits"],
    "Data Conversion": ["convert_"],
    "Network Utilities": ["network", "IPAddress"],
    "API Protocol": ["api::"],
    "WiFi Manager": ["wifi::"],
    "MQTT Client": ["mqtt::"],
    "Logger": ["logger::"],
    "OTA Updates": ["ota::"],
    "Web Server": ["web_server::"],
    "Time Management": ["time::"],
    "Sensor Framework": ["sensor::"],
    "Binary Sensor": ["binary_sensor::"],
    "Switch Framework": ["switch_::"],
    "Light Framework": ["light::"],
    "Climate Framework": ["climate::"],
    "Cover Framework": ["cover::"],
}
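
# A sketch (not part of this file) of the subcategory lookup these patterns
# feed, assuming a first-match-wins scan over the demangled name:
#
#     def categorize_core(demangled: str) -> str:
#         for subcategory, patterns in CORE_SUBCATEGORY_PATTERNS.items():
#             if any(p in demangled for p in patterns):
#                 return subcategory
#         return "Other"
#
#     categorize_core("esphome::Scheduler::call()")  # -> "Scheduler"
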
esphome/analyze_memory/helpers.py (new file, 121 lines)
@@ -0,0 +1,121 @@
"""Helper functions for memory analysis."""

from functools import cache
from pathlib import Path

from .const import SECTION_MAPPING

# Import namespace constant from parent module
# Note: This would create a circular import if done at module level,
# so we'll define it locally here as well
_NAMESPACE_ESPHOME = "esphome::"


# Get the list of actual ESPHome components by scanning the components directory
@cache
def get_esphome_components():
    """Get set of actual ESPHome components from the components directory."""
    # Find the components directory relative to this file
    # Go up two levels from analyze_memory/helpers.py to esphome/
    current_dir = Path(__file__).parent.parent
    components_dir = current_dir / "components"

    if not components_dir.exists() or not components_dir.is_dir():
        return frozenset()

    return frozenset(
        item.name
        for item in components_dir.iterdir()
        if item.is_dir()
        and not item.name.startswith(".")
        and not item.name.startswith("__")
    )


@cache
def get_component_class_patterns(component_name: str) -> list[str]:
    """Generate component class name patterns for symbol matching.

    Args:
        component_name: The component name (e.g., "ota", "wifi", "api")

    Returns:
        List of pattern strings to match against demangled symbols
    """
    component_upper = component_name.upper()
    component_camel = component_name.replace("_", "").title()
    return [
        f"{_NAMESPACE_ESPHOME}{component_upper}Component",  # e.g., esphome::OTAComponent
        f"{_NAMESPACE_ESPHOME}ESPHome{component_upper}Component",  # e.g., esphome::ESPHomeOTAComponent
        f"{_NAMESPACE_ESPHOME}{component_camel}Component",  # e.g., esphome::OtaComponent
        f"{_NAMESPACE_ESPHOME}ESPHome{component_camel}Component",  # e.g., esphome::ESPHomeOtaComponent
    ]


def map_section_name(raw_section: str) -> str | None:
    """Map raw section name to standard section.

    Args:
        raw_section: Raw section name from ELF file (e.g., ".iram0.text", ".rodata.str1.1")

    Returns:
        Standard section name (".text", ".rodata", ".data", ".bss") or None
    """
    for standard_section, patterns in SECTION_MAPPING.items():
        if any(pattern in raw_section for pattern in patterns):
            return standard_section
    return None


def parse_symbol_line(line: str) -> tuple[str, str, int, str] | None:
    """Parse a single symbol line from objdump output.

    Args:
        line: Line from objdump -t output

    Returns:
        Tuple of (section, name, size, address) or None if not a valid symbol.
        Format: address l/g w/d F/O section size name
        Example: 40084870 l     F .iram0.text  00000000 _xt_user_exc
    """
    parts = line.split()
    if len(parts) < 5:
        return None

    try:
        # Validate and extract address
        address = parts[0]
        int(address, 16)
    except ValueError:
        return None

    # Look for F (function) or O (object) flag
    if "F" not in parts and "O" not in parts:
        return None

    # Find section, size, and name
    for i, part in enumerate(parts):
        if not part.startswith("."):
            continue

        section = map_section_name(part)
        if not section:
            break

        # Need at least size field after section
        if i + 1 >= len(parts):
            break

        try:
            size = int(parts[i + 1], 16)
        except ValueError:
            break

        # Need symbol name and non-zero size
        if i + 2 >= len(parts) or size == 0:
            break

        name = " ".join(parts[i + 2 :])
        return (section, name, size, address)

    return None
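
# A quick usage sketch for the two helpers above (illustrative, not part of
# the diff): parse one objdump -t line and normalize a raw section name.
#
#     line = "40084870 l     F .iram0.text  0000012c app_main"
#     parse_symbol_line(line)
#     # -> (".text", "app_main", 300, "40084870")
#     map_section_name(".rodata.str1.1")
#     # -> ".rodata"
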
@@ -380,12 +380,19 @@ async def homeassistant_service_to_code(
    var = cg.new_Pvariable(action_id, template_arg, serv, False)
    templ = await cg.templatable(config[CONF_ACTION], args, None)
    cg.add(var.set_service(templ))

    # Initialize FixedVectors with exact sizes from config
    cg.add(var.init_data(len(config[CONF_DATA])))
    for key, value in config[CONF_DATA].items():
        templ = await cg.templatable(value, args, None)
        cg.add(var.add_data(key, templ))

    cg.add(var.init_data_template(len(config[CONF_DATA_TEMPLATE])))
    for key, value in config[CONF_DATA_TEMPLATE].items():
        templ = await cg.templatable(value, args, None)
        cg.add(var.add_data_template(key, templ))

    cg.add(var.init_variables(len(config[CONF_VARIABLES])))
    for key, value in config[CONF_VARIABLES].items():
        templ = await cg.templatable(value, args, None)
        cg.add(var.add_variable(key, templ))

@@ -458,15 +465,23 @@ async def homeassistant_event_to_code(config, action_id, template_arg, args):
    var = cg.new_Pvariable(action_id, template_arg, serv, True)
    templ = await cg.templatable(config[CONF_EVENT], args, None)
    cg.add(var.set_service(templ))

    # Initialize FixedVectors with exact sizes from config
    cg.add(var.init_data(len(config[CONF_DATA])))
    for key, value in config[CONF_DATA].items():
        templ = await cg.templatable(value, args, None)
        cg.add(var.add_data(key, templ))

    cg.add(var.init_data_template(len(config[CONF_DATA_TEMPLATE])))
    for key, value in config[CONF_DATA_TEMPLATE].items():
        templ = await cg.templatable(value, args, None)
        cg.add(var.add_data_template(key, templ))

    cg.add(var.init_variables(len(config[CONF_VARIABLES])))
    for key, value in config[CONF_VARIABLES].items():
        templ = await cg.templatable(value, args, None)
        cg.add(var.add_variable(key, templ))

    return var


@@ -489,6 +504,8 @@ async def homeassistant_tag_scanned_to_code(config, action_id, template_arg, arg
    serv = await cg.get_variable(config[CONF_ID])
    var = cg.new_Pvariable(action_id, template_arg, serv, True)
    cg.add(var.set_service("esphome.tag_scanned"))
    # Initialize FixedVector with exact size (1 data field)
    cg.add(var.init_data(1))
    templ = await cg.templatable(config[CONF_TAG], args, cg.std_string)
    cg.add(var.add_data("tag_id", templ))
    return var

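For a YAML action with two data keys, the codegen above would emit a C++ call
sequence along these lines (illustrative output, not taken from the diff):

// var.init_data(2);               // reserve the exact FixedVector capacity
// var.add_data("code", templ_1);  // add_* calls must not exceed the init count
// var.add_data("event", templ_2);
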
@@ -876,10 +876,10 @@ message ExecuteServiceArgument {
  string string_ = 4;
  // ESPHome 1.14 (api v1.3) make int a signed value
  sint32 int_ = 5;
  repeated bool bool_array = 6 [packed=false];
  repeated sint32 int_array = 7 [packed=false];
  repeated float float_array = 8 [packed=false];
  repeated string string_array = 9;
  repeated bool bool_array = 6 [packed=false, (fixed_vector) = true];
  repeated sint32 int_array = 7 [packed=false, (fixed_vector) = true];
  repeated float float_array = 8 [packed=false, (fixed_vector) = true];
  repeated string string_array = 9 [(fixed_vector) = true];
}
message ExecuteServiceRequest {
  option (id) = 42;
@@ -888,7 +888,7 @@ message ExecuteServiceRequest {
  option (ifdef) = "USE_API_SERVICES";

  fixed32 key = 1;
  repeated ExecuteServiceArgument args = 2;
  repeated ExecuteServiceArgument args = 2 [(fixed_vector) = true];
}

// ==================== CAMERA ====================
@@ -987,8 +987,8 @@ message ListEntitiesClimateResponse {
  string name = 3;
  reserved 4;  // Deprecated: was string unique_id

  bool supports_current_temperature = 5;
  bool supports_two_point_target_temperature = 6;
  bool supports_current_temperature = 5;  // Deprecated: use feature_flags
  bool supports_two_point_target_temperature = 6;  // Deprecated: use feature_flags
  repeated ClimateMode supported_modes = 7 [(container_pointer) = "std::set<climate::ClimateMode>"];
  float visual_min_temperature = 8;
  float visual_max_temperature = 9;
@@ -997,7 +997,7 @@ message ListEntitiesClimateResponse {
  // is if CLIMATE_PRESET_AWAY exists is supported_presets
  // Deprecated in API version 1.5
  bool legacy_supports_away = 11 [deprecated=true];
  bool supports_action = 12;
  bool supports_action = 12;  // Deprecated: use feature_flags
  repeated ClimateFanMode supported_fan_modes = 13 [(container_pointer) = "std::set<climate::ClimateFanMode>"];
  repeated ClimateSwingMode supported_swing_modes = 14 [(container_pointer) = "std::set<climate::ClimateSwingMode>"];
  repeated string supported_custom_fan_modes = 15 [(container_pointer) = "std::set"];
@@ -1007,11 +1007,12 @@ message ListEntitiesClimateResponse {
  string icon = 19 [(field_ifdef) = "USE_ENTITY_ICON"];
  EntityCategory entity_category = 20;
  float visual_current_temperature_step = 21;
  bool supports_current_humidity = 22;
  bool supports_target_humidity = 23;
  bool supports_current_humidity = 22;  // Deprecated: use feature_flags
  bool supports_target_humidity = 23;  // Deprecated: use feature_flags
  float visual_min_humidity = 24;
  float visual_max_humidity = 25;
  uint32 device_id = 26 [(field_ifdef) = "USE_DEVICES"];
  uint32 feature_flags = 27;
}
message ClimateStateResponse {
  option (id) = 47;

@@ -27,6 +27,9 @@
 #ifdef USE_BLUETOOTH_PROXY
 #include "esphome/components/bluetooth_proxy/bluetooth_proxy.h"
 #endif
 #ifdef USE_CLIMATE
 #include "esphome/components/climate/climate_mode.h"
 #endif
 #ifdef USE_VOICE_ASSISTANT
 #include "esphome/components/voice_assistant/voice_assistant.h"
 #endif
@@ -623,9 +626,10 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
  auto traits = climate->get_traits();
  resp.mode = static_cast<enums::ClimateMode>(climate->mode);
  resp.action = static_cast<enums::ClimateAction>(climate->action);
  if (traits.get_supports_current_temperature())
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE))
    resp.current_temperature = climate->current_temperature;
  if (traits.get_supports_two_point_target_temperature()) {
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                               climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
    resp.target_temperature_low = climate->target_temperature_low;
    resp.target_temperature_high = climate->target_temperature_high;
  } else {
@@ -644,9 +648,9 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
  }
  if (traits.get_supports_swing_modes())
    resp.swing_mode = static_cast<enums::ClimateSwingMode>(climate->swing_mode);
  if (traits.get_supports_current_humidity())
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY))
    resp.current_humidity = climate->current_humidity;
  if (traits.get_supports_target_humidity())
  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY))
    resp.target_humidity = climate->target_humidity;
  return fill_and_encode_entity_state(climate, resp, ClimateStateResponse::MESSAGE_TYPE, conn, remaining_size,
                                      is_single);
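
A sketch of the flag-test idiom this hunk switches to (illustrative; the real
has_feature_flags() lives in ClimateTraits, and "any of the OR'd flags set"
semantics are assumed here, not confirmed by the diff):

// bool has_feature_flags(uint32_t flags) const { return (this->feature_flags_ & flags) != 0; }
// One bitwise AND replaces a dedicated getter per capability.
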
@@ -656,10 +660,14 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
  auto *climate = static_cast<climate::Climate *>(entity);
  ListEntitiesClimateResponse msg;
  auto traits = climate->get_traits();
  // Flags set for backward compatibility, deprecated in 2025.11.0
  msg.supports_current_temperature = traits.get_supports_current_temperature();
  msg.supports_current_humidity = traits.get_supports_current_humidity();
  msg.supports_two_point_target_temperature = traits.get_supports_two_point_target_temperature();
  msg.supports_target_humidity = traits.get_supports_target_humidity();
  msg.supports_action = traits.get_supports_action();
  // Current feature flags and other supported parameters
  msg.feature_flags = traits.get_feature_flags();
  msg.supported_modes = &traits.get_supported_modes_for_api_();
  msg.visual_min_temperature = traits.get_visual_min_temperature();
  msg.visual_max_temperature = traits.get_visual_max_temperature();
@@ -667,7 +675,6 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
  msg.visual_current_temperature_step = traits.get_visual_current_temperature_step();
  msg.visual_min_humidity = traits.get_visual_min_humidity();
  msg.visual_max_humidity = traits.get_visual_max_humidity();
  msg.supports_action = traits.get_supports_action();
  msg.supported_fan_modes = &traits.get_supported_fan_modes_for_api_();
  msg.supported_custom_fan_modes = &traits.get_supported_custom_fan_modes_for_api_();
  msg.supported_presets = &traits.get_supported_presets_for_api_();
@@ -1406,7 +1413,7 @@ bool APIConnection::send_hello_response(const HelloRequest &msg) {

  HelloResponse resp;
  resp.api_version_major = 1;
  resp.api_version_minor = 12;
  resp.api_version_minor = 13;
  // Send only the version string - the client only logs this for debugging and doesn't use it otherwise
  resp.set_server_info(ESPHOME_VERSION_REF);
  resp.set_name(StringRef(App.get_name()));

@@ -242,7 +242,6 @@ APIError APINoiseFrameHelper::state_action_() {
  const std::string &name = App.get_name();
  const std::string &mac = get_mac_address();

  std::vector<uint8_t> msg;
  // Calculate positions and sizes
  size_t name_len = name.size() + 1;  // including null terminator
  size_t mac_len = mac.size() + 1;    // including null terminator
@@ -250,17 +249,17 @@ APIError APINoiseFrameHelper::state_action_() {
  size_t mac_offset = name_offset + name_len;
  size_t total_size = 1 + name_len + mac_len;

  msg.resize(total_size);
  auto msg = std::make_unique<uint8_t[]>(total_size);

  // chosen proto
  msg[0] = 0x01;

  // node name, terminated by null byte
  std::memcpy(msg.data() + name_offset, name.c_str(), name_len);
  std::memcpy(msg.get() + name_offset, name.c_str(), name_len);
  // node mac, terminated by null byte
  std::memcpy(msg.data() + mac_offset, mac.c_str(), mac_len);
  std::memcpy(msg.get() + mac_offset, mac.c_str(), mac_len);

  aerr = write_frame_(msg.data(), msg.size());
  aerr = write_frame_(msg.get(), total_size);
  if (aerr != APIError::OK)
    return aerr;
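
The std::vector-to-unique_ptr change above drops the vector's size/capacity
bookkeeping in favor of one exact-size heap allocation; the pattern in brief
(illustrative sketch, not from the diff):

// auto buf = std::make_unique<uint8_t[]>(total_size);  // exact-size buffer, no capacity field
// buf[0] = 0x01;
// write_frame_(buf.get(), total_size);                 // length must now be carried separately
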
@@ -339,32 +338,32 @@ void APINoiseFrameHelper::send_explicit_handshake_reject_(const LogString *reaso
|
||||
#ifdef USE_STORE_LOG_STR_IN_FLASH
|
||||
// On ESP8266 with flash strings, we need to use PROGMEM-aware functions
|
||||
size_t reason_len = strlen_P(reinterpret_cast<PGM_P>(reason));
|
||||
std::vector<uint8_t> data;
|
||||
data.resize(reason_len + 1);
|
||||
size_t data_size = reason_len + 1;
|
||||
auto data = std::make_unique<uint8_t[]>(data_size);
|
||||
data[0] = 0x01; // failure
|
||||
|
||||
// Copy error message from PROGMEM
|
||||
if (reason_len > 0) {
|
||||
memcpy_P(data.data() + 1, reinterpret_cast<PGM_P>(reason), reason_len);
|
||||
memcpy_P(data.get() + 1, reinterpret_cast<PGM_P>(reason), reason_len);
|
||||
}
|
||||
#else
|
||||
// Normal memory access
|
||||
const char *reason_str = LOG_STR_ARG(reason);
|
||||
size_t reason_len = strlen(reason_str);
|
||||
std::vector<uint8_t> data;
|
||||
data.resize(reason_len + 1);
|
||||
size_t data_size = reason_len + 1;
|
||||
auto data = std::make_unique<uint8_t[]>(data_size);
|
||||
data[0] = 0x01; // failure
|
||||
|
||||
// Copy error message in bulk
|
||||
if (reason_len > 0) {
|
||||
std::memcpy(data.data() + 1, reason_str, reason_len);
|
||||
std::memcpy(data.get() + 1, reason_str, reason_len);
|
||||
}
|
||||
#endif
|
||||
|
||||
// temporarily remove failed state
|
||||
auto orig_state = state_;
|
||||
state_ = State::EXPLICIT_REJECT;
|
||||
write_frame_(data.data(), data.size());
|
||||
write_frame_(data.get(), data_size);
|
||||
state_ = orig_state;
|
||||
}
|
||||
APIError APINoiseFrameHelper::read_packet(ReadPacketBuffer *buffer) {
|
||||
|
||||
@@ -1064,6 +1064,17 @@ bool ExecuteServiceArgument::decode_32bit(uint32_t field_id, Proto32Bit value) {
|
||||
}
|
||||
return true;
|
||||
}
|
||||
void ExecuteServiceArgument::decode(const uint8_t *buffer, size_t length) {
|
||||
uint32_t count_bool_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 6);
|
||||
this->bool_array.init(count_bool_array);
|
||||
uint32_t count_int_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 7);
|
||||
this->int_array.init(count_int_array);
|
||||
uint32_t count_float_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 8);
|
||||
this->float_array.init(count_float_array);
|
||||
uint32_t count_string_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 9);
|
||||
this->string_array.init(count_string_array);
|
||||
ProtoDecodableMessage::decode(buffer, length);
|
||||
}
|
||||
bool ExecuteServiceRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
|
||||
switch (field_id) {
|
||||
case 2:
|
||||
@@ -1085,6 +1096,11 @@ bool ExecuteServiceRequest::decode_32bit(uint32_t field_id, Proto32Bit value) {
|
||||
}
|
||||
return true;
|
||||
}
|
||||
void ExecuteServiceRequest::decode(const uint8_t *buffer, size_t length) {
|
||||
uint32_t count_args = ProtoDecodableMessage::count_repeated_field(buffer, length, 2);
|
||||
this->args.init(count_args);
|
||||
ProtoDecodableMessage::decode(buffer, length);
|
||||
}
|
||||
#endif
|
||||
#ifdef USE_CAMERA
|
||||
void ListEntitiesCameraResponse::encode(ProtoWriteBuffer buffer) const {
|
||||
@@ -1185,6 +1201,7 @@ void ListEntitiesClimateResponse::encode(ProtoWriteBuffer buffer) const {
|
||||
#ifdef USE_DEVICES
|
||||
buffer.encode_uint32(26, this->device_id);
|
||||
#endif
|
||||
buffer.encode_uint32(27, this->feature_flags);
|
||||
}
|
||||
void ListEntitiesClimateResponse::calculate_size(ProtoSize &size) const {
|
||||
size.add_length(1, this->object_id_ref_.size());
|
||||
@@ -1239,6 +1256,7 @@ void ListEntitiesClimateResponse::calculate_size(ProtoSize &size) const {
|
||||
#ifdef USE_DEVICES
|
||||
size.add_uint32(2, this->device_id);
|
||||
#endif
|
||||
size.add_uint32(2, this->feature_flags);
|
||||
}
|
||||
void ClimateStateResponse::encode(ProtoWriteBuffer buffer) const {
|
||||
buffer.encode_fixed32(1, this->key);
|
||||
|
||||
@@ -1279,10 +1279,11 @@ class ExecuteServiceArgument final : public ProtoDecodableMessage {
|
||||
float float_{0.0f};
|
||||
std::string string_{};
|
||||
int32_t int_{0};
|
||||
std::vector<bool> bool_array{};
|
||||
std::vector<int32_t> int_array{};
|
||||
std::vector<float> float_array{};
|
||||
std::vector<std::string> string_array{};
|
||||
FixedVector<bool> bool_array{};
|
||||
FixedVector<int32_t> int_array{};
|
||||
FixedVector<float> float_array{};
|
||||
FixedVector<std::string> string_array{};
|
||||
void decode(const uint8_t *buffer, size_t length) override;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
void dump_to(std::string &out) const override;
|
||||
#endif
|
||||
@@ -1300,7 +1301,8 @@ class ExecuteServiceRequest final : public ProtoDecodableMessage {
|
||||
const char *message_name() const override { return "execute_service_request"; }
|
||||
#endif
|
||||
uint32_t key{0};
|
||||
std::vector<ExecuteServiceArgument> args{};
|
||||
FixedVector<ExecuteServiceArgument> args{};
|
||||
void decode(const uint8_t *buffer, size_t length) override;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
void dump_to(std::string &out) const override;
|
||||
#endif
|
||||
@@ -1369,7 +1371,7 @@ class CameraImageRequest final : public ProtoDecodableMessage {
|
||||
class ListEntitiesClimateResponse final : public InfoResponseProtoMessage {
|
||||
public:
|
||||
static constexpr uint8_t MESSAGE_TYPE = 46;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 145;
|
||||
static constexpr uint8_t ESTIMATED_SIZE = 150;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
const char *message_name() const override { return "list_entities_climate_response"; }
|
||||
#endif
|
||||
@@ -1390,6 +1392,7 @@ class ListEntitiesClimateResponse final : public InfoResponseProtoMessage {
|
||||
bool supports_target_humidity{false};
|
||||
float visual_min_humidity{0.0f};
|
||||
float visual_max_humidity{0.0f};
|
||||
uint32_t feature_flags{0};
|
||||
void encode(ProtoWriteBuffer buffer) const override;
|
||||
void calculate_size(ProtoSize &size) const override;
|
||||
#ifdef HAS_PROTO_MESSAGE_DUMP
|
||||
|
||||
@@ -1292,6 +1292,7 @@ void ListEntitiesClimateResponse::dump_to(std::string &out) const {
|
||||
#ifdef USE_DEVICES
|
||||
dump_field(out, "device_id", this->device_id);
|
||||
#endif
|
||||
dump_field(out, "feature_flags", this->feature_flags);
|
||||
}
|
||||
void ClimateStateResponse::dump_to(std::string &out) const {
|
||||
MessageDumpHelper helper(out, "ClimateStateResponse");
|
||||
|
||||
@@ -41,10 +41,14 @@ template<typename... X> class TemplatableStringValue : public TemplatableValue<s

 template<typename... Ts> class TemplatableKeyValuePair {
  public:
+  // Default constructor needed for FixedVector::emplace_back()
+  TemplatableKeyValuePair() = default;
+
   // Keys are always string literals from YAML dictionary keys (e.g., "code", "event")
   // and never templatable values or lambdas. Only the value parameter can be a lambda/template.
   // Using pass-by-value with std::move allows optimal performance for both lvalues and rvalues.
   template<typename T> TemplatableKeyValuePair(std::string key, T value) : key(std::move(key)), value(value) {}

   std::string key;
   TemplatableStringValue<Ts...> value;
 };

@@ -93,15 +97,22 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts

   template<typename T> void set_service(T service) { this->service_ = service; }

+  // Initialize FixedVector members - called from Python codegen with compile-time known sizes.
+  // Must be called before any add_* methods; capacity must match the number of subsequent add_* calls.
+  void init_data(size_t count) { this->data_.init(count); }
+  void init_data_template(size_t count) { this->data_template_.init(count); }
+  void init_variables(size_t count) { this->variables_.init(count); }
+
   // Keys are always string literals from the Python code generation (e.g., cg.add(var.add_data("tag_id", templ))).
   // The value parameter can be a lambda/template, but keys are never templatable.
   // Using pass-by-value allows the compiler to optimize for both lvalues and rvalues.
-  template<typename T> void add_data(std::string key, T value) { this->data_.emplace_back(std::move(key), value); }
-  template<typename T> void add_data_template(std::string key, T value) {
-    this->data_template_.emplace_back(std::move(key), value);
-  }
-  template<typename T> void add_variable(std::string key, T value) {
-    this->variables_.emplace_back(std::move(key), value);
-  }
+  template<typename K, typename V> void add_data(K &&key, V &&value) {
+    this->add_kv_(this->data_, std::forward<K>(key), std::forward<V>(value));
+  }
+  template<typename K, typename V> void add_data_template(K &&key, V &&value) {
+    this->add_kv_(this->data_template_, std::forward<K>(key), std::forward<V>(value));
+  }
+  template<typename K, typename V> void add_variable(K &&key, V &&value) {
+    this->add_kv_(this->variables_, std::forward<K>(key), std::forward<V>(value));
+  }

 #ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
@@ -174,6 +185,13 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts
   }

  protected:
+  // Helper to add key-value pairs to FixedVectors with perfect forwarding to avoid copies
+  template<typename K, typename V> void add_kv_(FixedVector<TemplatableKeyValuePair<Ts...>> &vec, K &&key, V &&value) {
+    auto &kv = vec.emplace_back();
+    kv.key = std::forward<K>(key);
+    kv.value = std::forward<V>(value);
+  }
+
   template<typename VectorType, typename SourceType>
   static void populate_service_map(VectorType &dest, SourceType &source, Ts... x) {
     dest.init(source.size());

@@ -186,9 +204,9 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts

   APIServer *parent_;
   TemplatableStringValue<Ts...> service_{};
-  std::vector<TemplatableKeyValuePair<Ts...>> data_;
-  std::vector<TemplatableKeyValuePair<Ts...>> data_template_;
-  std::vector<TemplatableKeyValuePair<Ts...>> variables_;
+  FixedVector<TemplatableKeyValuePair<Ts...>> data_;
+  FixedVector<TemplatableKeyValuePair<Ts...>> data_template_;
+  FixedVector<TemplatableKeyValuePair<Ts...>> variables_;
 #ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
 #ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
   TemplatableStringValue<Ts...> response_template_{""};
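For context, the init_*/add_* pairing above is a contract with generated code: capacity is fixed once, then filled exactly. A minimal sketch of the expected call sequence (the action pointer and key/value strings are illustrative, not from a real config):

    // Hypothetical generated snippet: init_data() must precede the add_data() calls,
    // and the reserved count must match the number of adds.
    act->set_service("light.turn_on");
    act->init_data(2);                            // reserve exactly two key-value slots
    act->add_data("entity_id", "light.kitchen");  // keys are always string literals
    act->add_data("brightness", "255");           // values may also be lambdas/templates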
@@ -7,6 +7,69 @@ namespace esphome::api {

 static const char *const TAG = "api.proto";

+uint32_t ProtoDecodableMessage::count_repeated_field(const uint8_t *buffer, size_t length, uint32_t target_field_id) {
+  uint32_t count = 0;
+  const uint8_t *ptr = buffer;
+  const uint8_t *end = buffer + length;
+
+  while (ptr < end) {
+    uint32_t consumed;
+
+    // Parse field header (tag)
+    auto res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
+    if (!res.has_value()) {
+      break;  // Invalid data, stop counting
+    }
+
+    uint32_t tag = res->as_uint32();
+    uint32_t field_type = tag & WIRE_TYPE_MASK;
+    uint32_t field_id = tag >> 3;
+    ptr += consumed;
+
+    // Count if this is the target field
+    if (field_id == target_field_id) {
+      count++;
+    }
+
+    // Skip field data based on wire type
+    switch (field_type) {
+      case WIRE_TYPE_VARINT: {  // VarInt - parse and skip
+        res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
+        if (!res.has_value()) {
+          return count;  // Invalid data, return what we have
+        }
+        ptr += consumed;
+        break;
+      }
+      case WIRE_TYPE_LENGTH_DELIMITED: {  // Length-delimited - parse length and skip data
+        res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
+        if (!res.has_value()) {
+          return count;
+        }
+        uint32_t field_length = res->as_uint32();
+        ptr += consumed;
+        if (ptr + field_length > end) {
+          return count;  // Out of bounds
+        }
+        ptr += field_length;
+        break;
+      }
+      case WIRE_TYPE_FIXED32: {  // 32-bit - skip 4 bytes
+        if (ptr + 4 > end) {
+          return count;
+        }
+        ptr += 4;
+        break;
+      }
+      default:
+        // Unknown wire type, can't continue
+        return count;
+    }
+  }
+
+  return count;
+}
+
 void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) {
   const uint8_t *ptr = buffer;
   const uint8_t *end = buffer + length;

@@ -22,12 +85,12 @@ void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) {
   }

   uint32_t tag = res->as_uint32();
-  uint32_t field_type = tag & 0b111;
+  uint32_t field_type = tag & WIRE_TYPE_MASK;
   uint32_t field_id = tag >> 3;
   ptr += consumed;

   switch (field_type) {
-    case 0: {  // VarInt
+    case WIRE_TYPE_VARINT: {  // VarInt
       res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
       if (!res.has_value()) {
         ESP_LOGV(TAG, "Invalid VarInt at offset %ld", (long) (ptr - buffer));

@@ -39,7 +102,7 @@ void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) {
       ptr += consumed;
       break;
     }
-    case 2: {  // Length-delimited
+    case WIRE_TYPE_LENGTH_DELIMITED: {  // Length-delimited
       res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
       if (!res.has_value()) {
         ESP_LOGV(TAG, "Invalid Length Delimited at offset %ld", (long) (ptr - buffer));

@@ -57,7 +120,7 @@ void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) {
       ptr += field_length;
       break;
     }
-    case 5: {  // 32-bit
+    case WIRE_TYPE_FIXED32: {  // 32-bit
       if (ptr + 4 > end) {
         ESP_LOGV(TAG, "Out-of-bounds Fixed32-bit at offset %ld", (long) (ptr - buffer));
         return;
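A worked example of the counting scan above, on a hand-built buffer (the bytes are illustrative, not from the PR, but follow standard protobuf wire encoding):

    // field 1 appears twice, field 2 once; the scan only reads tags and skips payloads.
    static const uint8_t buf[] = {
        0x08, 0x96, 0x01,      // tag 0x08 = (1 << 3) | VARINT, value 150
        0x12, 0x02, 'h', 'i',  // tag 0x12 = (2 << 3) | LENGTH_DELIMITED, "hi"
        0x08, 0x01,            // tag 0x08 = field 1 again, value 1
    };
    uint32_t n = ProtoDecodableMessage::count_repeated_field(buf, sizeof(buf), 1);  // n == 2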
@@ -15,6 +15,13 @@

 namespace esphome::api {

+// Protocol Buffer wire type constants
+// See https://protobuf.dev/programming-guides/encoding/#structure
+constexpr uint8_t WIRE_TYPE_VARINT = 0;            // int32, int64, uint32, uint64, sint32, sint64, bool, enum
+constexpr uint8_t WIRE_TYPE_LENGTH_DELIMITED = 2;  // string, bytes, embedded messages, packed repeated fields
+constexpr uint8_t WIRE_TYPE_FIXED32 = 5;           // fixed32, sfixed32, float
+constexpr uint8_t WIRE_TYPE_MASK = 0b111;          // Mask to extract wire type from tag
+
 // Helper functions for ZigZag encoding/decoding
 inline constexpr uint32_t encode_zigzag32(int32_t value) {
   return (static_cast<uint32_t>(value) << 1) ^ (static_cast<uint32_t>(value >> 31));

@@ -241,7 +248,7 @@ class ProtoWriteBuffer {
   * Following https://protobuf.dev/programming-guides/encoding/#structure
   */
   void encode_field_raw(uint32_t field_id, uint32_t type) {
-    uint32_t val = (field_id << 3) | (type & 0b111);
+    uint32_t val = (field_id << 3) | (type & WIRE_TYPE_MASK);
     this->encode_varint_raw(val);
   }
   void encode_string(uint32_t field_id, const char *string, size_t len, bool force = false) {

@@ -354,7 +361,18 @@ class ProtoMessage {
 // Base class for messages that support decoding
 class ProtoDecodableMessage : public ProtoMessage {
  public:
-  void decode(const uint8_t *buffer, size_t length);
+  virtual void decode(const uint8_t *buffer, size_t length);
+
+  /**
+   * Count occurrences of a repeated field in a protobuf buffer.
+   * This is a lightweight scan that only parses tags and skips field data.
+   *
+   * @param buffer Pointer to the protobuf buffer
+   * @param length Length of the buffer in bytes
+   * @param target_field_id The field ID to count
+   * @return Number of times the field appears in the buffer
+   */
+  static uint32_t count_repeated_field(const uint8_t *buffer, size_t length, uint32_t target_field_id);

  protected:
   virtual bool decode_varint(uint32_t field_id, ProtoVarInt value) { return false; }

@@ -482,7 +500,7 @@ class ProtoSize {
   * @return The number of bytes needed to encode the field ID and wire type
   */
  static constexpr uint32_t field(uint32_t field_id, uint32_t type) {
-    uint32_t tag = (field_id << 3) | (type & 0b111);
+    uint32_t tag = (field_id << 3) | (type & WIRE_TYPE_MASK);
     return varint(tag);
  }
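The tag math used throughout is (field_id << 3) | wire_type; a few worked values that follow directly from the definitions above:

    // Field 1, varint:  (1 << 3)  | 0 = 0x08 -> one varint byte,  so ProtoSize::field(1, WIRE_TYPE_VARINT) == 1.
    // Field 16, varint: (16 << 3) | 0 = 0x80 -> two varint bytes, so ProtoSize::field(16, WIRE_TYPE_VARINT) == 2.
    // ZigZag maps small negatives to small unsigned values:
    static_assert(encode_zigzag32(0) == 0, "zero maps to zero");
    static_assert(encode_zigzag32(-1) == 1, "small negatives stay small");
    static_assert(encode_zigzag32(1) == 2, "positives double");
    static_assert(encode_zigzag32(-2) == 3, "alternating mapping");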
@@ -12,16 +12,16 @@ template<> int32_t get_execute_arg_value<int32_t>(const ExecuteServiceArgument &
 template<> float get_execute_arg_value<float>(const ExecuteServiceArgument &arg) { return arg.float_; }
 template<> std::string get_execute_arg_value<std::string>(const ExecuteServiceArgument &arg) { return arg.string_; }
 template<> std::vector<bool> get_execute_arg_value<std::vector<bool>>(const ExecuteServiceArgument &arg) {
-  return arg.bool_array;
+  return std::vector<bool>(arg.bool_array.begin(), arg.bool_array.end());
 }
 template<> std::vector<int32_t> get_execute_arg_value<std::vector<int32_t>>(const ExecuteServiceArgument &arg) {
-  return arg.int_array;
+  return std::vector<int32_t>(arg.int_array.begin(), arg.int_array.end());
 }
 template<> std::vector<float> get_execute_arg_value<std::vector<float>>(const ExecuteServiceArgument &arg) {
-  return arg.float_array;
+  return std::vector<float>(arg.float_array.begin(), arg.float_array.end());
 }
 template<> std::vector<std::string> get_execute_arg_value<std::vector<std::string>>(const ExecuteServiceArgument &arg) {
-  return arg.string_array;
+  return std::vector<std::string>(arg.string_array.begin(), arg.string_array.end());
 }

 template<> enums::ServiceArgType to_service_arg_type<bool>() { return enums::SERVICE_ARG_TYPE_BOOL; }

@@ -55,7 +55,7 @@ template<typename... Ts> class UserServiceBase : public UserServiceDescriptor {

  protected:
   virtual void execute(Ts... x) = 0;
-  template<int... S> void execute_(const std::vector<ExecuteServiceArgument> &args, seq<S...> type) {
+  template<typename ArgsContainer, int... S> void execute_(const ArgsContainer &args, seq<S...> type) {
     this->execute((get_execute_arg_value<Ts>(args[S]))...);
   }
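The templated execute_() above expands an index sequence over the argument container; a short sketch of what the expansion compiles to (types chosen for illustration):

    // For Ts = <bool, float> and S = <0, 1>, execute_() compiles down to:
    //   this->execute(get_execute_arg_value<bool>(args[0]), get_execute_arg_value<float>(args[1]));
    // Because ArgsContainer is now a template parameter, args may be a std::vector or the
    // FixedVector held by the decoded message - anything with operator[].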
esphome/components/bh1900nux/__init__.py — new file (0 lines)

esphome/components/bh1900nux/bh1900nux.cpp — new file (54 lines)
@@ -0,0 +1,54 @@
+#include "esphome/core/log.h"
+#include "bh1900nux.h"
+
+namespace esphome {
+namespace bh1900nux {
+
+static const char *const TAG = "bh1900nux.sensor";
+
+// I2C Registers
+static const uint8_t TEMPERATURE_REG = 0x00;
+static const uint8_t CONFIG_REG = 0x01;            // Not used and supported yet
+static const uint8_t TEMPERATURE_LOW_REG = 0x02;   // Not used and supported yet
+static const uint8_t TEMPERATURE_HIGH_REG = 0x03;  // Not used and supported yet
+static const uint8_t SOFT_RESET_REG = 0x04;
+
+// I2C Command payloads
+static const uint8_t SOFT_RESET_PAYLOAD = 0x01;  // Soft Reset value
+
+static const float SENSOR_RESOLUTION = 0.0625f;  // Sensor resolution per bit in degrees celsius
+
+void BH1900NUXSensor::setup() {
+  // Initialize I2C device
+  i2c::ErrorCode result_code =
+      this->write_register(SOFT_RESET_REG, &SOFT_RESET_PAYLOAD, 1);  // Software Reset to check communication
+  if (result_code != i2c::ERROR_OK) {
+    this->mark_failed(ESP_LOG_MSG_COMM_FAIL);
+    return;
+  }
+}
+
+void BH1900NUXSensor::update() {
+  uint8_t temperature_raw[2];
+  if (this->read_register(TEMPERATURE_REG, temperature_raw, 2) != i2c::ERROR_OK) {
+    ESP_LOGE(TAG, ESP_LOG_MSG_COMM_FAIL);
+    return;
+  }
+
+  // Combined raw value, unsigned and unaligned 16 bit
+  // Temperature is represented in just 12 bits, shift needed
+  int16_t raw_temperature_register_value = encode_uint16(temperature_raw[0], temperature_raw[1]);
+  raw_temperature_register_value >>= 4;
+  float temperature_value = raw_temperature_register_value * SENSOR_RESOLUTION;  // Apply sensor resolution
+
+  this->publish_state(temperature_value);
+}
+
+void BH1900NUXSensor::dump_config() {
+  LOG_SENSOR("", "BH1900NUX", this);
+  LOG_I2C_DEVICE(this);
+  LOG_UPDATE_INTERVAL(this);
+}
+
+}  // namespace bh1900nux
+}  // namespace esphome
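A worked pass through the 12-bit conversion in update() (register bytes are illustrative):

    // {0x19, 0x00} -> encode_uint16 -> 0x1900 (6400); 6400 >> 4 = 400; 400 * 0.0625 = 25.0 °C
    // Negative readings rely on the arithmetic right shift of int16_t:
    // {0xFF, 0x60} -> 0xFF60 -> (int16_t) -160; -160 >> 4 = -10; -10 * 0.0625 = -0.625 °C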
esphome/components/bh1900nux/bh1900nux.h — new file (18 lines)
@@ -0,0 +1,18 @@
+#pragma once
+
+#include "esphome/core/component.h"
+#include "esphome/components/sensor/sensor.h"
+#include "esphome/components/i2c/i2c.h"
+
+namespace esphome {
+namespace bh1900nux {
+
+class BH1900NUXSensor : public sensor::Sensor, public PollingComponent, public i2c::I2CDevice {
+ public:
+  void setup() override;
+  void update() override;
+  void dump_config() override;
+};
+
+}  // namespace bh1900nux
+}  // namespace esphome
esphome/components/bh1900nux/sensor.py — new file (34 lines)
@@ -0,0 +1,34 @@
+import esphome.codegen as cg
+from esphome.components import i2c, sensor
+import esphome.config_validation as cv
+from esphome.const import (
+    DEVICE_CLASS_TEMPERATURE,
+    STATE_CLASS_MEASUREMENT,
+    UNIT_CELSIUS,
+)
+
+DEPENDENCIES = ["i2c"]
+CODEOWNERS = ["@B48D81EFCC"]
+
+sensor_ns = cg.esphome_ns.namespace("bh1900nux")
+BH1900NUXSensor = sensor_ns.class_(
+    "BH1900NUXSensor", cg.PollingComponent, i2c.I2CDevice
+)
+
+CONFIG_SCHEMA = (
+    sensor.sensor_schema(
+        BH1900NUXSensor,
+        accuracy_decimals=1,
+        unit_of_measurement=UNIT_CELSIUS,
+        device_class=DEVICE_CLASS_TEMPERATURE,
+        state_class=STATE_CLASS_MEASUREMENT,
+    )
+    .extend(cv.polling_component_schema("60s"))
+    .extend(i2c.i2c_device_schema(0x48))
+)
+
+
+async def to_code(config):
+    var = await sensor.new_sensor(config)
+    await cg.register_component(var, config)
+    await i2c.register_i2c_device(var, config)
@@ -8,17 +8,30 @@ namespace cap1188 {
 static const char *const TAG = "cap1188";

 void CAP1188Component::setup() {
-  // Reset device using the reset pin
-  if (this->reset_pin_ != nullptr) {
-    this->reset_pin_->setup();
-    this->reset_pin_->digital_write(false);
-    delay(100);  // NOLINT
-    this->reset_pin_->digital_write(true);
-    delay(100);  // NOLINT
-    this->reset_pin_->digital_write(false);
-    delay(100);  // NOLINT
+  this->disable_loop();
+
+  // no reset pin
+  if (this->reset_pin_ == nullptr) {
+    this->finish_setup_();
+    return;
   }
+
+  // reset pin configured so reset before finishing setup
+  this->reset_pin_->setup();
+  this->reset_pin_->digital_write(false);
+  // delay after reset pin write
+  this->set_timeout(100, [this]() {
+    this->reset_pin_->digital_write(true);
+    // delay after reset pin write
+    this->set_timeout(100, [this]() {
+      this->reset_pin_->digital_write(false);
+      // delay after reset pin write
+      this->set_timeout(100, [this]() { this->finish_setup_(); });
+    });
+  });
+}

+void CAP1188Component::finish_setup_() {
   // Check if CAP1188 is actually connected
   this->read_byte(CAP1188_PRODUCT_ID, &this->cap1188_product_id_);
   this->read_byte(CAP1188_MANUFACTURE_ID, &this->cap1188_manufacture_id_);

@@ -44,6 +57,9 @@

   // Speed up a bit
   this->write_byte(CAP1188_STAND_BY_CONFIGURATION, 0x30);
+
+  // Setup successful, so enable loop
+  this->enable_loop();
 }

 void CAP1188Component::dump_config() {

@@ -49,6 +49,8 @@ class CAP1188Component : public Component, public i2c::I2CDevice {
   void loop() override;

  protected:
+  void finish_setup_();
+
   std::vector<CAP1188Channel *> channels_{};
   uint8_t touch_threshold_{0x20};
   uint8_t allow_multiple_touches_{0x80};
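The rework above swaps blocking delay() calls for chained set_timeout() callbacks so the main loop keeps running during the ~300 ms reset sequence; the same pattern in isolation (component and member names are illustrative):

    // Non-blocking reset: each stage re-arms a 100 ms timer instead of stalling the loop.
    void ExampleComponent::setup() {
      this->disable_loop();  // nothing to do in loop() until setup completes
      this->reset_pin_->digital_write(false);
      this->set_timeout(100, [this]() {
        this->reset_pin_->digital_write(true);
        this->set_timeout(100, [this]() { this->finish_setup_(); });
      });
    }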
@@ -96,7 +96,8 @@ void ClimateCall::validate_() {
   }
   if (this->target_temperature_.has_value()) {
     auto target = *this->target_temperature_;
-    if (traits.get_supports_two_point_target_temperature()) {
+    if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                                 CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
       ESP_LOGW(TAG, " Cannot set target temperature for climate device "
                     "with two-point target temperature!");
       this->target_temperature_.reset();

@@ -106,7 +107,8 @@ void ClimateCall::validate_() {
     }
   }
   if (this->target_temperature_low_.has_value() || this->target_temperature_high_.has_value()) {
-    if (!traits.get_supports_two_point_target_temperature()) {
+    if (!traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                                  CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
       ESP_LOGW(TAG, " Cannot set low/high target temperature for this device!");
       this->target_temperature_low_.reset();
       this->target_temperature_high_.reset();

@@ -350,13 +352,14 @@ void Climate::save_state_() {

   state.mode = this->mode;
   auto traits = this->get_traits();
-  if (traits.get_supports_two_point_target_temperature()) {
+  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
     state.target_temperature_low = this->target_temperature_low;
     state.target_temperature_high = this->target_temperature_high;
   } else {
     state.target_temperature = this->target_temperature;
   }
-  if (traits.get_supports_target_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
     state.target_humidity = this->target_humidity;
   }
   if (traits.get_supports_fan_modes() && fan_mode.has_value()) {

@@ -400,7 +403,7 @@ void Climate::publish_state() {
   auto traits = this->get_traits();

   ESP_LOGD(TAG, " Mode: %s", LOG_STR_ARG(climate_mode_to_string(this->mode)));
-  if (traits.get_supports_action()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
     ESP_LOGD(TAG, " Action: %s", LOG_STR_ARG(climate_action_to_string(this->action)));
   }
   if (traits.get_supports_fan_modes() && this->fan_mode.has_value()) {

@@ -418,19 +421,20 @@ void Climate::publish_state() {
   if (traits.get_supports_swing_modes()) {
     ESP_LOGD(TAG, " Swing Mode: %s", LOG_STR_ARG(climate_swing_mode_to_string(this->swing_mode)));
   }
-  if (traits.get_supports_current_temperature()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
     ESP_LOGD(TAG, " Current Temperature: %.2f°C", this->current_temperature);
   }
-  if (traits.get_supports_two_point_target_temperature()) {
+  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
     ESP_LOGD(TAG, " Target Temperature: Low: %.2f°C High: %.2f°C", this->target_temperature_low,
              this->target_temperature_high);
   } else {
     ESP_LOGD(TAG, " Target Temperature: %.2f°C", this->target_temperature);
   }
-  if (traits.get_supports_current_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
     ESP_LOGD(TAG, " Current Humidity: %.0f%%", this->current_humidity);
   }
-  if (traits.get_supports_target_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
     ESP_LOGD(TAG, " Target Humidity: %.0f%%", this->target_humidity);
   }

@@ -485,13 +489,14 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) {
   auto call = climate->make_call();
   auto traits = climate->get_traits();
   call.set_mode(this->mode);
-  if (traits.get_supports_two_point_target_temperature()) {
+  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
     call.set_target_temperature_low(this->target_temperature_low);
     call.set_target_temperature_high(this->target_temperature_high);
   } else {
     call.set_target_temperature(this->target_temperature);
   }
-  if (traits.get_supports_target_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
     call.set_target_humidity(this->target_humidity);
   }
   if (traits.get_supports_fan_modes() || !traits.get_supported_custom_fan_modes().empty()) {

@@ -508,13 +513,14 @@ ClimateCall ClimateDeviceRestoreState::to_call(Climate *climate) {
 void ClimateDeviceRestoreState::apply(Climate *climate) {
   auto traits = climate->get_traits();
   climate->mode = this->mode;
-  if (traits.get_supports_two_point_target_temperature()) {
+  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
     climate->target_temperature_low = this->target_temperature_low;
     climate->target_temperature_high = this->target_temperature_high;
   } else {
     climate->target_temperature = this->target_temperature;
   }
-  if (traits.get_supports_target_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
     climate->target_humidity = this->target_humidity;
   }
   if (traits.get_supports_fan_modes() && !this->uses_custom_fan_mode) {

@@ -580,28 +586,30 @@ void Climate::dump_traits_(const char *tag) {
                 " Target: %.1f",
                 traits.get_visual_min_temperature(), traits.get_visual_max_temperature(),
                 traits.get_visual_target_temperature_step());
-  if (traits.get_supports_current_temperature()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
     ESP_LOGCONFIG(tag, " Current: %.1f", traits.get_visual_current_temperature_step());
   }
-  if (traits.get_supports_target_humidity() || traits.get_supports_current_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY |
+                               climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
     ESP_LOGCONFIG(tag,
                   " - Min humidity: %.0f\n"
                   " - Max humidity: %.0f",
                   traits.get_visual_min_humidity(), traits.get_visual_max_humidity());
   }
-  if (traits.get_supports_two_point_target_temperature()) {
+  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
+                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
     ESP_LOGCONFIG(tag, " [x] Supports two-point target temperature");
   }
-  if (traits.get_supports_current_temperature()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE)) {
     ESP_LOGCONFIG(tag, " [x] Supports current temperature");
   }
-  if (traits.get_supports_target_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY)) {
     ESP_LOGCONFIG(tag, " [x] Supports target humidity");
   }
-  if (traits.get_supports_current_humidity()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)) {
     ESP_LOGCONFIG(tag, " [x] Supports current humidity");
   }
-  if (traits.get_supports_action()) {
+  if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_ACTION)) {
     ESP_LOGCONFIG(tag, " [x] Supports action");
   }
   if (!traits.get_supported_modes().empty()) {
@@ -98,6 +98,21 @@ enum ClimatePreset : uint8_t {
   CLIMATE_PRESET_ACTIVITY = 7,
 };

+enum ClimateFeature : uint32_t {
+  // Reporting current temperature is supported
+  CLIMATE_SUPPORTS_CURRENT_TEMPERATURE = 1 << 0,
+  // Setting two target temperatures is supported (used in conjunction with CLIMATE_MODE_HEAT_COOL)
+  CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE = 1 << 1,
+  // Single-point mode is NOT supported (UI always displays two handles, setting 'target_temperature' is not supported)
+  CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE = 1 << 2,
+  // Reporting current humidity is supported
+  CLIMATE_SUPPORTS_CURRENT_HUMIDITY = 1 << 3,
+  // Setting a target humidity is supported
+  CLIMATE_SUPPORTS_TARGET_HUMIDITY = 1 << 4,
+  // Reporting current climate action is supported
+  CLIMATE_SUPPORTS_ACTION = 1 << 5,
+};
+
 /// Convert the given ClimateMode to a human-readable string.
 const LogString *climate_mode_to_string(ClimateMode mode);
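Since has_feature_flags() returns feature_flags_ & mask (see ClimateTraits below), passing a combined mask tests whether ANY of the bits is set; a small worked example using the enum values above:

    uint32_t flags = CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | CLIMATE_SUPPORTS_ACTION;    // 1 | 32 = 0b100001
    bool any_two_point = flags & (CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                                  CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);       // false, bits 1 and 2 unset
    bool has_action = flags & CLIMATE_SUPPORTS_ACTION;                                  // true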
@@ -21,48 +21,92 @@ namespace climate {
  * - Target Temperature
  *
  * All other properties and modes are optional and the integration must mark
- * each of them as supported by setting the appropriate flag here.
+ * each of them as supported by setting the appropriate flag(s) here.
  *
- * - supports current temperature - if the climate device supports reporting a current temperature
- * - supports two point target temperature - if the climate device's target temperature should be
- *   split in target_temperature_low and target_temperature_high instead of just the single target_temperature
+ * - feature flags: see ClimateFeatures enum in climate_mode.h
  * - supports modes:
  *   - auto mode (automatic control)
  *   - cool mode (lowers current temperature)
  *   - heat mode (increases current temperature)
  *   - dry mode (removes humidity from air)
  *   - fan mode (only turns on fan)
- * - supports action - if the climate device supports reporting the active
- *   current action of the device with the action property.
 * - supports fan modes - optionally, if it has a fan which can be configured in different ways:
 *   - on, off, auto, high, medium, low, middle, focus, diffuse, quiet
 * - supports swing modes - optionally, if it has a swing which can be configured in different ways:
 *   - off, both, vertical, horizontal
 *
 * This class also contains static data for the climate device display:
- * - visual min/max temperature - tells the frontend what range of temperatures the climate device
- *   should display (gauge min/max values)
+ * - visual min/max temperature/humidity - tells the frontend what range of temperature/humidity the
+ *   climate device should display (gauge min/max values)
 * - temperature step - the step with which to increase/decrease target temperature.
 *   This also affects with how many decimal places the temperature is shown
 */
 class ClimateTraits {
  public:
-  bool get_supports_current_temperature() const { return this->supports_current_temperature_; }
+  /// Get/set feature flags (see ClimateFeatures enum in climate_mode.h)
+  uint32_t get_feature_flags() const { return this->feature_flags_; }
+  void add_feature_flags(uint32_t feature_flags) { this->feature_flags_ |= feature_flags; }
+  void clear_feature_flags(uint32_t feature_flags) { this->feature_flags_ &= ~feature_flags; }
+  bool has_feature_flags(uint32_t feature_flags) const { return this->feature_flags_ & feature_flags; }
+  void set_feature_flags(uint32_t feature_flags) { this->feature_flags_ = feature_flags; }
+
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_current_temperature() const {
+    return this->has_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
+  }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
   void set_supports_current_temperature(bool supports_current_temperature) {
-    this->supports_current_temperature_ = supports_current_temperature;
+    if (supports_current_temperature) {
+      this->add_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
+    } else {
+      this->clear_feature_flags(CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);
+    }
   }
-  bool get_supports_current_humidity() const { return this->supports_current_humidity_; }
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_current_humidity() const { return this->has_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY); }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
   void set_supports_current_humidity(bool supports_current_humidity) {
-    this->supports_current_humidity_ = supports_current_humidity;
+    if (supports_current_humidity) {
+      this->add_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
+    } else {
+      this->clear_feature_flags(CLIMATE_SUPPORTS_CURRENT_HUMIDITY);
+    }
   }
-  bool get_supports_two_point_target_temperature() const { return this->supports_two_point_target_temperature_; }
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_two_point_target_temperature() const {
+    return this->has_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
+  }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
   void set_supports_two_point_target_temperature(bool supports_two_point_target_temperature) {
-    this->supports_two_point_target_temperature_ = supports_two_point_target_temperature;
+    if (supports_two_point_target_temperature)
+    // Use CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE to mimic previous behavior
+    {
+      this->add_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
+    } else {
+      this->clear_feature_flags(CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE);
+    }
   }
-  bool get_supports_target_humidity() const { return this->supports_target_humidity_; }
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_target_humidity() const { return this->has_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY); }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
   void set_supports_target_humidity(bool supports_target_humidity) {
-    this->supports_target_humidity_ = supports_target_humidity;
+    if (supports_target_humidity) {
+      this->add_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY);
+    } else {
+      this->clear_feature_flags(CLIMATE_SUPPORTS_TARGET_HUMIDITY);
+    }
   }
+  ESPDEPRECATED("This method is deprecated, use get_feature_flags() instead", "2025.11.0")
+  bool get_supports_action() const { return this->has_feature_flags(CLIMATE_SUPPORTS_ACTION); }
+  ESPDEPRECATED("This method is deprecated, use add_feature_flags() instead", "2025.11.0")
+  void set_supports_action(bool supports_action) {
+    if (supports_action) {
+      this->add_feature_flags(CLIMATE_SUPPORTS_ACTION);
+    } else {
+      this->clear_feature_flags(CLIMATE_SUPPORTS_ACTION);
+    }
+  }

   void set_supported_modes(std::set<ClimateMode> modes) { this->supported_modes_ = std::move(modes); }
   void add_supported_mode(ClimateMode mode) { this->supported_modes_.insert(mode); }
   ESPDEPRECATED("This method is deprecated, use set_supported_modes() instead", "v1.20")

@@ -82,9 +126,6 @@ class ClimateTraits {
   bool supports_mode(ClimateMode mode) const { return this->supported_modes_.count(mode); }
   const std::set<ClimateMode> &get_supported_modes() const { return this->supported_modes_; }

-  void set_supports_action(bool supports_action) { this->supports_action_ = supports_action; }
-  bool get_supports_action() const { return this->supports_action_; }
-
   void set_supported_fan_modes(std::set<ClimateFanMode> modes) { this->supported_fan_modes_ = std::move(modes); }
   void add_supported_fan_mode(ClimateFanMode mode) { this->supported_fan_modes_.insert(mode); }
   void add_supported_custom_fan_mode(const std::string &mode) { this->supported_custom_fan_modes_.insert(mode); }

@@ -219,24 +260,20 @@ class ClimateTraits {
     }
   }

-  bool supports_current_temperature_{false};
-  bool supports_current_humidity_{false};
-  bool supports_two_point_target_temperature_{false};
-  bool supports_target_humidity_{false};
-  std::set<climate::ClimateMode> supported_modes_ = {climate::CLIMATE_MODE_OFF};
-  bool supports_action_{false};
-  std::set<climate::ClimateFanMode> supported_fan_modes_;
-  std::set<climate::ClimateSwingMode> supported_swing_modes_;
-  std::set<climate::ClimatePreset> supported_presets_;
-  std::set<std::string> supported_custom_fan_modes_;
-  std::set<std::string> supported_custom_presets_;
-
+  uint32_t feature_flags_{0};
   float visual_min_temperature_{10};
   float visual_max_temperature_{30};
   float visual_target_temperature_step_{0.1};
   float visual_current_temperature_step_{0.1};
   float visual_min_humidity_{30};
   float visual_max_humidity_{99};
+
+  std::set<climate::ClimateMode> supported_modes_ = {climate::CLIMATE_MODE_OFF};
+  std::set<climate::ClimateFanMode> supported_fan_modes_;
+  std::set<climate::ClimateSwingMode> supported_swing_modes_;
+  std::set<climate::ClimatePreset> supported_presets_;
+  std::set<std::string> supported_custom_fan_modes_;
+  std::set<std::string> supported_custom_presets_;
 };

 }  // namespace climate
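A sketch of what a component's traits() looks like after migrating off the deprecated setters (MyClimate is a placeholder, not from the PR):

    climate::ClimateTraits MyClimate::traits() {
      auto traits = climate::ClimateTraits();
      // replaces set_supports_current_temperature(true) / set_supports_action(true)
      traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE |
                               climate::CLIMATE_SUPPORTS_ACTION);
      traits.set_supported_modes({climate::CLIMATE_MODE_OFF, climate::CLIMATE_MODE_HEAT});
      return traits;
    }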
@@ -30,14 +30,12 @@ class DateTimeBase : public EntityBase {
 #endif
 };

-#ifdef USE_TIME
 class DateTimeStateTrigger : public Trigger<ESPTime> {
  public:
   explicit DateTimeStateTrigger(DateTimeBase *parent) {
     parent->add_on_state_callback([this, parent]() { this->trigger(parent->state_as_esptime()); });
   }
 };
-#endif

 }  // namespace datetime
 }  // namespace esphome
@@ -11,8 +11,6 @@
 #include <esp_chip_info.h>
 #include <esp_partition.h>

-#include <map>
-
 #ifdef USE_ARDUINO
 #include <Esp.h>
 #endif

@@ -125,7 +123,12 @@ void DebugComponent::log_partition_info_() {

 uint32_t DebugComponent::get_free_heap_() { return heap_caps_get_free_size(MALLOC_CAP_INTERNAL); }

-static const std::map<int, const char *> CHIP_FEATURES = {
+struct ChipFeature {
+  int bit;
+  const char *name;
+};
+
+static constexpr ChipFeature CHIP_FEATURES[] = {
   {CHIP_FEATURE_BLE, "BLE"},
   {CHIP_FEATURE_BT, "BT"},
   {CHIP_FEATURE_EMB_FLASH, "EMB Flash"},

@@ -170,11 +173,13 @@ void DebugComponent::get_device_info_(std::string &device_info) {
   esp_chip_info(&info);
   const char *model = ESPHOME_VARIANT;
   std::string features;
-  for (auto feature : CHIP_FEATURES) {
-    if (info.features & feature.first) {
-      features += feature.second;
+
+  // Check each known feature bit
+  for (const auto &feature : CHIP_FEATURES) {
+    if (info.features & feature.bit) {
+      features += feature.name;
       features += ", ";
-      info.features &= ~feature.first;
+      info.features &= ~feature.bit;
     }
   }
   if (info.features != 0)
@@ -25,10 +25,37 @@ static void show_reset_reason(std::string &reset_reason, bool set, const char *r
   reset_reason += reason;
 }

-inline uint32_t read_mem_u32(uintptr_t addr) {
+static inline uint32_t read_mem_u32(uintptr_t addr) {
   return *reinterpret_cast<volatile uint32_t *>(addr);  // NOLINT(performance-no-int-to-ptr)
 }

+static inline uint8_t read_mem_u8(uintptr_t addr) {
+  return *reinterpret_cast<volatile uint8_t *>(addr);  // NOLINT(performance-no-int-to-ptr)
+}
+
+// defines from https://github.com/adafruit/Adafruit_nRF52_Bootloader which prints those information
+constexpr uint32_t SD_MAGIC_NUMBER = 0x51B1E5DB;
+constexpr uintptr_t MBR_SIZE = 0x1000;
+constexpr uintptr_t SOFTDEVICE_INFO_STRUCT_OFFSET = 0x2000;
+constexpr uintptr_t SD_ID_OFFSET = SOFTDEVICE_INFO_STRUCT_OFFSET + 0x10;
+constexpr uintptr_t SD_VERSION_OFFSET = SOFTDEVICE_INFO_STRUCT_OFFSET + 0x14;
+
+static inline bool is_sd_present() {
+  return read_mem_u32(SOFTDEVICE_INFO_STRUCT_OFFSET + MBR_SIZE + 4) == SD_MAGIC_NUMBER;
+}
+static inline uint32_t sd_id_get() {
+  if (read_mem_u8(MBR_SIZE + SOFTDEVICE_INFO_STRUCT_OFFSET) > (SD_ID_OFFSET - SOFTDEVICE_INFO_STRUCT_OFFSET)) {
+    return read_mem_u32(MBR_SIZE + SD_ID_OFFSET);
+  }
+  return 0;
+}
+static inline uint32_t sd_version_get() {
+  if (read_mem_u8(MBR_SIZE + SOFTDEVICE_INFO_STRUCT_OFFSET) > (SD_VERSION_OFFSET - SOFTDEVICE_INFO_STRUCT_OFFSET)) {
+    return read_mem_u32(MBR_SIZE + SD_VERSION_OFFSET);
+  }
+  return 0;
+}
+
 std::string DebugComponent::get_reset_reason_() {
   uint32_t cause;
   auto ret = hwinfo_get_reset_cause(&cause);

@@ -271,6 +298,29 @@ void DebugComponent::get_device_info_(std::string &device_info) {
             NRF_UICR->NRFFW[0]);
   ESP_LOGD(TAG, "MBR param page addr 0x%08x, UICR param page addr 0x%08x", read_mem_u32(MBR_PARAM_PAGE_ADDR),
             NRF_UICR->NRFFW[1]);
+  if (is_sd_present()) {
+    uint32_t const sd_id = sd_id_get();
+    uint32_t const sd_version = sd_version_get();
+
+    uint32_t ver[3];
+    ver[0] = sd_version / 1000000;
+    ver[1] = (sd_version - ver[0] * 1000000) / 1000;
+    ver[2] = (sd_version - ver[0] * 1000000 - ver[1] * 1000);
+
+    ESP_LOGD(TAG, "SoftDevice: S%u %u.%u.%u", sd_id, ver[0], ver[1], ver[2]);
+#ifdef USE_SOFTDEVICE_ID
+#ifdef USE_SOFTDEVICE_VERSION
+    if (USE_SOFTDEVICE_ID != sd_id || USE_SOFTDEVICE_VERSION != ver[0]) {
+      ESP_LOGE(TAG, "Built for SoftDevice S%u %u.x.y. It may crash due to mismatch of bootloader version.",
+               USE_SOFTDEVICE_ID, USE_SOFTDEVICE_VERSION);
+    }
+#else
+    if (USE_SOFTDEVICE_ID != sd_id) {
+      ESP_LOGE(TAG, "Built for SoftDevice S%u. It may crash due to mismatch of bootloader version.", USE_SOFTDEVICE_ID);
+    }
+#endif
+#endif
+  }
 #endif
 }
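The version split above packs major/minor/patch into one integer; a worked decode (the sd_version value is illustrative):

    // sd_version = 7003001:
    //   ver[0] = 7003001 / 1000000                 = 7
    //   ver[1] = (7003001 - 7 * 1000000) / 1000    = 3
    //   ver[2] = 7003001 - 7 * 1000000 - 3 * 1000  = 1
    // -> logged as "SoftDevice: S<id> 7.3.1"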
@@ -828,6 +828,9 @@ async def to_code(config):
     # Disable dynamic log level control to save memory
     add_idf_sdkconfig_option("CONFIG_LOG_DYNAMIC_LEVEL_CONTROL", False)

+    # Reduce PHY TX power in the event of a brownout
+    add_idf_sdkconfig_option("CONFIG_ESP_PHY_REDUCE_TX_POWER", True)
+
     # Set default CPU frequency
     add_idf_sdkconfig_option(
         f"CONFIG_ESP_DEFAULT_CPU_FREQ_MHZ_{config[CONF_CPU_FREQUENCY][:-3]}", True
@@ -108,8 +108,13 @@ class BTLoggers(Enum):
     """ESP32 WiFi provisioning over Bluetooth"""


-# Set to track which loggers are needed by components
-_required_loggers: set[BTLoggers] = set()
+# Key for storing required loggers in CORE.data
+ESP32_BLE_REQUIRED_LOGGERS_KEY = "esp32_ble_required_loggers"
+
+
+def _get_required_loggers() -> set[BTLoggers]:
+    """Get the set of required Bluetooth loggers from CORE.data."""
+    return CORE.data.setdefault(ESP32_BLE_REQUIRED_LOGGERS_KEY, set())


 # Dataclass for handler registration counts

@@ -170,12 +175,13 @@ def register_bt_logger(*loggers: BTLoggers) -> None:
     Args:
         *loggers: One or more BTLoggers enum members
     """
+    required_loggers = _get_required_loggers()
     for logger in loggers:
         if not isinstance(logger, BTLoggers):
             raise TypeError(
                 f"Logger must be a BTLoggers enum member, got {type(logger)}"
             )
-        _required_loggers.add(logger)
+        required_loggers.add(logger)


 CONF_BLE_ID = "ble_id"

@@ -488,8 +494,9 @@ async def to_code(config):
     # Apply logger settings if log disabling is enabled
     if config.get(CONF_DISABLE_BT_LOGS, False):
         # Disable all Bluetooth loggers that are not required
+        required_loggers = _get_required_loggers()
         for logger in BTLoggers:
-            if logger not in _required_loggers:
+            if logger not in required_loggers:
                 add_idf_sdkconfig_option(f"{logger.value}_NONE", True)

     # Set BLE connection establishment timeout to match aioesphomeapi/bleak-retry-connector
@@ -60,11 +60,21 @@ class RegistrationCounts:
     clients: int = 0


-# Set to track which features are needed by components
-_required_features: set[BLEFeatures] = set()
+# CORE.data keys for state management
+ESP32_BLE_TRACKER_REQUIRED_FEATURES_KEY = "esp32_ble_tracker_required_features"
+ESP32_BLE_TRACKER_REGISTRATION_COUNTS_KEY = "esp32_ble_tracker_registration_counts"

-# Track registration counts for StaticVector sizing
-_registration_counts = RegistrationCounts()
+
+def _get_required_features() -> set[BLEFeatures]:
+    """Get the set of required BLE features from CORE.data."""
+    return CORE.data.setdefault(ESP32_BLE_TRACKER_REQUIRED_FEATURES_KEY, set())
+
+
+def _get_registration_counts() -> RegistrationCounts:
+    """Get the registration counts from CORE.data."""
+    return CORE.data.setdefault(
+        ESP32_BLE_TRACKER_REGISTRATION_COUNTS_KEY, RegistrationCounts()
+    )


 def register_ble_features(features: set[BLEFeatures]) -> None:

@@ -73,7 +83,7 @@ def register_ble_features(features: set[BLEFeatures]) -> None:
     Args:
         features: Set of BLEFeatures enum members
     """
-    _required_features.update(features)
+    _get_required_features().update(features)


 esp32_ble_tracker_ns = cg.esphome_ns.namespace("esp32_ble_tracker")

@@ -267,15 +277,17 @@ async def to_code(config):
     ):
         register_ble_features({BLEFeatures.ESP_BT_DEVICE})

+    registration_counts = _get_registration_counts()
+
     for conf in config.get(CONF_ON_BLE_ADVERTISE, []):
-        _registration_counts.listeners += 1
+        registration_counts.listeners += 1
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
         if CONF_MAC_ADDRESS in conf:
             addr_list = [it.as_hex for it in conf[CONF_MAC_ADDRESS]]
             cg.add(trigger.set_addresses(addr_list))
         await automation.build_automation(trigger, [(ESPBTDeviceConstRef, "x")], conf)
     for conf in config.get(CONF_ON_BLE_SERVICE_DATA_ADVERTISE, []):
-        _registration_counts.listeners += 1
+        registration_counts.listeners += 1
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
         if len(conf[CONF_SERVICE_UUID]) == len(bt_uuid16_format):
             cg.add(trigger.set_service_uuid16(as_hex(conf[CONF_SERVICE_UUID])))

@@ -288,7 +300,7 @@ async def to_code(config):
             cg.add(trigger.set_address(conf[CONF_MAC_ADDRESS].as_hex))
         await automation.build_automation(trigger, [(adv_data_t_const_ref, "x")], conf)
     for conf in config.get(CONF_ON_BLE_MANUFACTURER_DATA_ADVERTISE, []):
-        _registration_counts.listeners += 1
+        registration_counts.listeners += 1
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
         if len(conf[CONF_MANUFACTURER_ID]) == len(bt_uuid16_format):
             cg.add(trigger.set_manufacturer_uuid16(as_hex(conf[CONF_MANUFACTURER_ID])))

@@ -301,7 +313,7 @@ async def to_code(config):
             cg.add(trigger.set_address(conf[CONF_MAC_ADDRESS].as_hex))
         await automation.build_automation(trigger, [(adv_data_t_const_ref, "x")], conf)
     for conf in config.get(CONF_ON_SCAN_END, []):
-        _registration_counts.listeners += 1
+        registration_counts.listeners += 1
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
         await automation.build_automation(trigger, [], conf)

@@ -331,19 +343,21 @@ async def to_code(config):
 @coroutine_with_priority(CoroPriority.FINAL)
 async def _add_ble_features():
     # Add feature-specific defines based on what's needed
-    if BLEFeatures.ESP_BT_DEVICE in _required_features:
+    required_features = _get_required_features()
+    if BLEFeatures.ESP_BT_DEVICE in required_features:
         cg.add_define("USE_ESP32_BLE_DEVICE")
         cg.add_define("USE_ESP32_BLE_UUID")

     # Add defines for StaticVector sizing based on registration counts
     # Only define if count > 0 to avoid allocating unnecessary memory
-    if _registration_counts.listeners > 0:
+    registration_counts = _get_registration_counts()
+    if registration_counts.listeners > 0:
         cg.add_define(
-            "ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT", _registration_counts.listeners
+            "ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT", registration_counts.listeners
         )
-    if _registration_counts.clients > 0:
+    if registration_counts.clients > 0:
         cg.add_define(
-            "ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT", _registration_counts.clients
+            "ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT", registration_counts.clients
         )

@@ -395,7 +409,7 @@ async def register_ble_device(
     var: cg.SafeExpType, config: ConfigType
 ) -> cg.SafeExpType:
     register_ble_features({BLEFeatures.ESP_BT_DEVICE})
-    _registration_counts.listeners += 1
+    _get_registration_counts().listeners += 1
     paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
     cg.add(paren.register_listener(var))
     return var

@@ -403,7 +417,7 @@ async def register_ble_device(

 async def register_client(var: cg.SafeExpType, config: ConfigType) -> cg.SafeExpType:
     register_ble_features({BLEFeatures.ESP_BT_DEVICE})
-    _registration_counts.clients += 1
+    _get_registration_counts().clients += 1
     paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
     cg.add(paren.register_client(var))
     return var

@@ -417,7 +431,7 @@ async def register_raw_ble_device(
     This does NOT register the ESP_BT_DEVICE feature, meaning ESPBTDevice
     will not be compiled in if this is the only registration method used.
     """
-    _registration_counts.listeners += 1
+    _get_registration_counts().listeners += 1
     paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
     cg.add(paren.register_listener(var))
     return var

@@ -431,7 +445,7 @@ async def register_raw_client(
     This does NOT register the ESP_BT_DEVICE feature, meaning ESPBTDevice
     will not be compiled in if this is the only registration method used.
     """
-    _registration_counts.clients += 1
+    _get_registration_counts().clients += 1
     paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
     cg.add(paren.register_client(var))
     return var
@@ -190,7 +190,7 @@ async def to_code(config):
     cg.add_define("ESPHOME_VARIANT", "ESP8266")
     cg.add_define(ThreadModel.SINGLE)

-    cg.add_platformio_option("extra_scripts", ["post:post_build.py"])
+    cg.add_platformio_option("extra_scripts", ["pre:iram_fix.py", "post:post_build.py"])

     conf = config[CONF_FRAMEWORK]
     cg.add_platformio_option("framework", "arduino")

@@ -230,6 +230,12 @@ async def to_code(config):
     # For cases where nullptrs can be handled, use nothrow: `new (std::nothrow) T;`
     cg.add_build_flag("-DNEW_OOM_ABORT")

+    # In testing mode, fake a larger IRAM to allow linking grouped component tests
+    # Real ESP8266 hardware only has 32KB IRAM, but for CI testing we pretend it has 2MB
+    # This is done via a pre-build script that generates a custom linker script
+    if CORE.testing_mode:
+        cg.add_build_flag("-DESPHOME_TESTING_MODE")
+
     cg.add_platformio_option("board_build.flash_mode", config[CONF_BOARD_FLASH_MODE])

     ver: cv.Version = CORE.data[KEY_CORE][KEY_FRAMEWORK_VERSION]

@@ -265,3 +271,8 @@ def copy_files():
         post_build_file,
         CORE.relative_build_path("post_build.py"),
     )
+    iram_fix_file = dir / "iram_fix.py.script"
+    copy_file_if_changed(
+        iram_fix_file,
+        CORE.relative_build_path("iram_fix.py"),
+    )
esphome/components/esp8266/iram_fix.py.script — new file (44 lines)
@@ -0,0 +1,44 @@
+import os
+import re
+
+# pylint: disable=E0602
+Import("env")  # noqa
+
+
+def patch_linker_script_after_preprocess(source, target, env):
+    """Patch the local linker script after PlatformIO preprocesses it."""
+    # Check if we're in testing mode by looking for the define
+    build_flags = env.get("BUILD_FLAGS", [])
+    testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)
+
+    if not testing_mode:
+        return
+
+    # Get the local linker script path
+    build_dir = env.subst("$BUILD_DIR")
+    local_ld = os.path.join(build_dir, "ld", "local.eagle.app.v6.common.ld")
+
+    if not os.path.exists(local_ld):
+        return
+
+    # Read the linker script
+    with open(local_ld, "r") as f:
+        content = f.read()
+
+    # Replace IRAM size from 0x8000 (32KB) to 0x200000 (2MB)
+    # The line looks like: iram1_0_seg : org = 0x40100000, len = 0x8000
+    updated = re.sub(
+        r"(iram1_0_seg\s*:\s*org\s*=\s*0x40100000\s*,\s*len\s*=\s*)0x8000",
+        r"\g<1>0x200000",
+        content,
+    )
+
+    if updated != content:
+        with open(local_ld, "w") as f:
+            f.write(updated)
+        print("ESPHome: Patched IRAM size to 2MB for testing mode")
+
+
+# Hook into the build process right before linking
+# This runs after PlatformIO has already preprocessed the linker scripts
+env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_linker_script_after_preprocess)
@@ -19,6 +19,7 @@ from esphome.const import (
 from esphome.core import CORE, coroutine_with_priority
 from esphome.coroutine import CoroPriority
 import esphome.final_validate as fv
+from esphome.types import ConfigType

 _LOGGER = logging.getLogger(__name__)

@@ -136,11 +137,12 @@ FINAL_VALIDATE_SCHEMA = ota_esphome_final_validate


 @coroutine_with_priority(CoroPriority.OTA_UPDATES)
-async def to_code(config):
+async def to_code(config: ConfigType) -> None:
     var = cg.new_Pvariable(config[CONF_ID])
     cg.add(var.set_port(config[CONF_PORT]))

-    if CONF_PASSWORD in config:
+    # Password could be set to an empty string and we can assume that means no password
+    if config.get(CONF_PASSWORD):
         cg.add(var.set_auth_password(config[CONF_PASSWORD]))
         cg.add_define("USE_OTA_PASSWORD")
         # Only include hash algorithms when password is configured
@@ -9,8 +9,8 @@ static const char *const TAG = "htu21d";

 static const uint8_t HTU21D_ADDRESS = 0x40;
 static const uint8_t HTU21D_REGISTER_RESET = 0xFE;
-static const uint8_t HTU21D_REGISTER_TEMPERATURE = 0xE3;
-static const uint8_t HTU21D_REGISTER_HUMIDITY = 0xE5;
+static const uint8_t HTU21D_REGISTER_TEMPERATURE = 0xF3;
+static const uint8_t HTU21D_REGISTER_HUMIDITY = 0xF5;
 static const uint8_t HTU21D_WRITERHT_REG_CMD = 0xE6; /**< Write RH/T User Register 1 */
 static const uint8_t HTU21D_REGISTER_STATUS = 0xE7;
 static const uint8_t HTU21D_WRITEHEATER_REG_CMD = 0x51; /**< Write Heater Control Register */
@@ -143,7 +143,18 @@ def validate_mclk_divisible_by_3(config):
     return config


-_use_legacy_driver = None
+# Key for storing legacy driver setting in CORE.data
+I2S_USE_LEGACY_DRIVER_KEY = "i2s_use_legacy_driver"
+
+
+def _get_use_legacy_driver():
+    """Get the legacy driver setting from CORE.data."""
+    return CORE.data.get(I2S_USE_LEGACY_DRIVER_KEY)
+
+
+def _set_use_legacy_driver(value: bool) -> None:
+    """Set the legacy driver setting in CORE.data."""
+    CORE.data[I2S_USE_LEGACY_DRIVER_KEY] = value


 def i2s_audio_component_schema(

@@ -209,17 +220,15 @@ async def register_i2s_audio_component(var, config):


 def validate_use_legacy(value):
-    global _use_legacy_driver  # noqa: PLW0603
     if CONF_USE_LEGACY in value:
-        if (_use_legacy_driver is not None) and (
-            _use_legacy_driver != value[CONF_USE_LEGACY]
-        ):
+        existing_value = _get_use_legacy_driver()
+        if (existing_value is not None) and (existing_value != value[CONF_USE_LEGACY]):
             raise cv.Invalid(
                 f"All i2s_audio components must set {CONF_USE_LEGACY} to the same value."
             )
         if (not value[CONF_USE_LEGACY]) and (CORE.using_arduino):
             raise cv.Invalid("Arduino supports only the legacy i2s driver")
-        _use_legacy_driver = value[CONF_USE_LEGACY]
+        _set_use_legacy_driver(value[CONF_USE_LEGACY])
     return value

@@ -249,7 +258,8 @@ def _final_validate(_):


 def use_legacy():
-    return not (CORE.using_esp_idf and not _use_legacy_driver)
+    legacy_driver = _get_use_legacy_driver()
+    return not (CORE.using_esp_idf and not legacy_driver)


 FINAL_VALIDATE_SCHEMA = _final_validate
@@ -35,6 +35,7 @@ CONF_CHARGE = "charge"
 CONF_CHARGE_COULOMBS = "charge_coulombs"
 CONF_ENERGY_JOULES = "energy_joules"
 CONF_TEMPERATURE_COEFFICIENT = "temperature_coefficient"
+CONF_RESET_ON_BOOT = "reset_on_boot"
 UNIT_AMPERE_HOURS = "Ah"
 UNIT_COULOMB = "C"
 UNIT_JOULE = "J"

@@ -113,6 +114,7 @@ INA2XX_SCHEMA = cv.Schema(
         cv.Optional(CONF_TEMPERATURE_COEFFICIENT, default=0): cv.int_range(
             min=0, max=16383
         ),
+        cv.Optional(CONF_RESET_ON_BOOT, default=True): cv.boolean,
         cv.Optional(CONF_SHUNT_VOLTAGE): cv.maybe_simple_value(
             sensor.sensor_schema(
                 unit_of_measurement=UNIT_MILLIVOLT,

@@ -206,6 +208,7 @@ async def setup_ina2xx(var, config):
     cg.add(var.set_adc_range(config[CONF_ADC_RANGE]))
     cg.add(var.set_adc_avg_samples(config[CONF_ADC_AVERAGING]))
     cg.add(var.set_shunt_tempco(config[CONF_TEMPERATURE_COEFFICIENT]))
+    cg.add(var.set_reset_on_boot(config[CONF_RESET_ON_BOOT]))

     adc_time_config = config[CONF_ADC_TIME]
     if isinstance(adc_time_config, dict):

@@ -257,7 +257,12 @@ bool INA2XX::reset_energy_counters() {
 bool INA2XX::reset_config_() {
   ESP_LOGV(TAG, "Reset");
   ConfigurationRegister cfg{0};
-  cfg.RST = true;
+  if (!this->reset_on_boot_) {
+    ESP_LOGI(TAG, "Skipping on-boot device reset");
+    cfg.RST = false;
+  } else {
+    cfg.RST = true;
+  }
   return this->write_unsigned_16_(RegisterMap::REG_CONFIG, cfg.raw_u16);
 }

@@ -127,6 +127,7 @@ class INA2XX : public PollingComponent {
   void set_adc_time_die_temperature(AdcTime time) { this->adc_time_die_temperature_ = time; }
   void set_adc_avg_samples(AdcAvgSamples samples) { this->adc_avg_samples_ = samples; }
   void set_shunt_tempco(uint16_t coeff) { this->shunt_tempco_ppm_c_ = coeff; }
+  void set_reset_on_boot(bool reset) { this->reset_on_boot_ = reset; }

   void set_shunt_voltage_sensor(sensor::Sensor *sensor) { this->shunt_voltage_sensor_ = sensor; }
   void set_bus_voltage_sensor(sensor::Sensor *sensor) { this->bus_voltage_sensor_ = sensor; }

@@ -172,6 +173,7 @@ class INA2XX : public PollingComponent {
   AdcTime adc_time_die_temperature_{AdcTime::ADC_TIME_4120US};
   AdcAvgSamples adc_avg_samples_{AdcAvgSamples::ADC_AVG_SAMPLES_128};
   uint16_t shunt_tempco_ppm_c_{0};
+  bool reset_on_boot_{true};

   //
   // Calculated coefficients
@@ -1,5 +1,6 @@
from __future__ import annotations

import logging
from pathlib import Path

from esphome import pins

@@ -48,6 +49,7 @@ from .gpio import nrf52_pin_to_code  # noqa
CODEOWNERS = ["@tomaszduda23"]
AUTO_LOAD = ["zephyr"]
IS_TARGET_PLATFORM = True
_LOGGER = logging.getLogger(__name__)


def set_core_data(config: ConfigType) -> ConfigType:
@@ -127,6 +129,10 @@ def _validate_mcumgr(config):
def _final_validate(config):
    if CONF_DFU in config:
        _validate_mcumgr(config)
    if config[KEY_BOOTLOADER] == BOOTLOADER_ADAFRUIT:
        _LOGGER.warning(
            "Selected generic Adafruit bootloader. The board might crash. Consider setting `bootloader:`"
        )


FINAL_VALIDATE_SCHEMA = _final_validate
@@ -157,6 +163,13 @@ async def to_code(config: ConfigType) -> None:
    if config[KEY_BOOTLOADER] == BOOTLOADER_MCUBOOT:
        cg.add_define("USE_BOOTLOADER_MCUBOOT")
    else:
        if "_sd" in config[KEY_BOOTLOADER]:
            bootloader = config[KEY_BOOTLOADER].split("_")
            sd_id = bootloader[2][2:]
            cg.add_define("USE_SOFTDEVICE_ID", int(sd_id))
            if len(bootloader) > 3:
                sd_version = bootloader[3][1:]
                cg.add_define("USE_SOFTDEVICE_VERSION", int(sd_version))
        # make sure that firmware.zip is created
        # for Adafruit_nRF52_Bootloader
        cg.add_platformio_option("board_upload.protocol", "nrfutil")
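A worked example of the token parsing above. The literal string is an assumed expansion of BOOTLOADER_ADAFRUIT_NRF52_SD140_V6; only the slicing logic is taken from the diff:

# Editor's sketch: how the softdevice id/version fall out of the split.
bootloader = "adafruit_nrf52_sd140_v6".split("_")
# bootloader == ["adafruit", "nrf52", "sd140", "v6"]
sd_id = bootloader[2][2:]  # strip the "sd" prefix -> "140"
assert int(sd_id) == 140
if len(bootloader) > 3:
    sd_version = bootloader[3][1:]  # strip the "v" prefix -> "6"
    assert int(sd_version) == 6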
@@ -11,10 +11,18 @@ from .const import (
BOARDS_ZEPHYR = {
    "adafruit_itsybitsy_nrf52840": {
        KEY_BOOTLOADER: [
            BOOTLOADER_ADAFRUIT_NRF52_SD140_V6,
            BOOTLOADER_ADAFRUIT,
            BOOTLOADER_ADAFRUIT_NRF52_SD132,
            BOOTLOADER_ADAFRUIT_NRF52_SD140_V7,
        ]
    },
    "xiao_ble": {
        KEY_BOOTLOADER: [
            BOOTLOADER_ADAFRUIT_NRF52_SD140_V7,
            BOOTLOADER_ADAFRUIT,
            BOOTLOADER_ADAFRUIT_NRF52_SD132,
            BOOTLOADER_ADAFRUIT_NRF52_SD140_V6,
            BOOTLOADER_ADAFRUIT_NRF52_SD140_V7,
        ]
    },
}
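The reordering above moves a specific softdevice bootloader to the head of each list. A sketch of what that buys, assuming the first entry acts as the board's default (an inference from the ordering, not stated in the diff):

# Editor's sketch: picking a board's default bootloader.
BOARDS_ZEPHYR = {
    "xiao_ble": {"bootloader": ["adafruit_nrf52_sd140_v7", "adafruit"]},
}


def default_bootloader(board: str) -> str:
    return BOARDS_ZEPHYR[board]["bootloader"][0]  # first entry wins


assert default_bootloader("xiao_ble") == "adafruit_nrf52_sd140_v7"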
@@ -66,6 +66,7 @@ from esphome.const import (
    DEVICE_CLASS_SPEED,
    DEVICE_CLASS_SULPHUR_DIOXIDE,
    DEVICE_CLASS_TEMPERATURE,
    DEVICE_CLASS_TEMPERATURE_DELTA,
    DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS,
    DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS_PARTS,
    DEVICE_CLASS_VOLTAGE,

@@ -130,6 +131,7 @@ DEVICE_CLASSES = [
    DEVICE_CLASS_SPEED,
    DEVICE_CLASS_SULPHUR_DIOXIDE,
    DEVICE_CLASS_TEMPERATURE,
    DEVICE_CLASS_TEMPERATURE_DELTA,
    DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS,
    DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS_PARTS,
    DEVICE_CLASS_VOLTAGE,

@@ -1056,6 +1056,52 @@ async def sony_action(var, config, args):
    cg.add(var.set_nbits(template_))


# Symphony
SymphonyData, SymphonyBinarySensor, SymphonyTrigger, SymphonyAction, SymphonyDumper = (
    declare_protocol("Symphony")
)
SYMPHONY_SCHEMA = cv.Schema(
    {
        cv.Required(CONF_DATA): cv.hex_uint32_t,
        cv.Required(CONF_NBITS): cv.int_range(min=1, max=32),
        cv.Optional(CONF_COMMAND_REPEATS, default=2): cv.uint8_t,
    }
)


@register_binary_sensor("symphony", SymphonyBinarySensor, SYMPHONY_SCHEMA)
def symphony_binary_sensor(var, config):
    cg.add(
        var.set_data(
            cg.StructInitializer(
                SymphonyData,
                ("data", config[CONF_DATA]),
                ("nbits", config[CONF_NBITS]),
            )
        )
    )


@register_trigger("symphony", SymphonyTrigger, SymphonyData)
def symphony_trigger(var, config):
    pass


@register_dumper("symphony", SymphonyDumper)
def symphony_dumper(var, config):
    pass


@register_action("symphony", SymphonyAction, SYMPHONY_SCHEMA)
async def symphony_action(var, config, args):
    template_ = await cg.templatable(config[CONF_DATA], args, cg.uint32)
    cg.add(var.set_data(template_))
    template_ = await cg.templatable(config[CONF_NBITS], args, cg.uint32)
    cg.add(var.set_nbits(template_))
    template_ = await cg.templatable(config[CONF_COMMAND_REPEATS], args, cg.uint8)
    cg.add(var.set_repeats(template_))


# Raw
def validate_raw_alternating(value):
    assert isinstance(value, list)

esphome/components/remote_base/symphony_protocol.cpp (new file, 120 lines)
@@ -0,0 +1,120 @@
#include "symphony_protocol.h"
#include "esphome/core/log.h"

namespace esphome {
namespace remote_base {

static const char *const TAG = "remote.symphony";

// Reference implementation and timing details:
// IRremoteESP8266 ir_Symphony.cpp
// https://github.com/crankyoldgit/IRremoteESP8266/blob/master/src/ir_Symphony.cpp
// The implementation below mirrors the constant bit-time mapping and
// footer-gap handling used there.

// Symphony protocol timing specifications (tuned to handset captures)
static const uint32_t BIT_ZERO_HIGH_US = 460;   // short
static const uint32_t BIT_ZERO_LOW_US = 1260;   // long
static const uint32_t BIT_ONE_HIGH_US = 1260;   // long
static const uint32_t BIT_ONE_LOW_US = 460;     // short
static const uint32_t CARRIER_FREQUENCY = 38000;

// IRremoteESP8266 reference: kSymphonyFooterGap = 4 * (mark + space)
static const uint32_t FOOTER_GAP_US = 4 * (BIT_ZERO_HIGH_US + BIT_ZERO_LOW_US);
// Typical inter-frame gap (~34.8 ms observed)
static const uint32_t INTER_FRAME_GAP_US = 34760;

void SymphonyProtocol::encode(RemoteTransmitData *dst, const SymphonyData &data) {
  dst->set_carrier_frequency(CARRIER_FREQUENCY);
  ESP_LOGD(TAG, "Sending Symphony: data=0x%0*X nbits=%u repeats=%u", (data.nbits + 3) / 4, (uint32_t) data.data,
           data.nbits, data.repeats);
  // Each bit produces a mark+space (2 entries). We fold the inter-frame/footer gap
  // into the last bit's space of each frame to avoid over-length gaps.
  dst->reserve(data.nbits * 2u * data.repeats);

  for (uint8_t repeats = 0; repeats < data.repeats; repeats++) {
    // Data bits (MSB first)
    for (uint32_t mask = 1UL << (data.nbits - 1); mask != 0; mask >>= 1) {
      const bool is_last_bit = (mask == 1);
      const bool is_last_frame = (repeats == (data.repeats - 1));
      if (is_last_bit) {
        // Emit last bit's mark; replace its space with the proper gap
        if (data.data & mask) {
          dst->mark(BIT_ONE_HIGH_US);
        } else {
          dst->mark(BIT_ZERO_HIGH_US);
        }
        dst->space(is_last_frame ? FOOTER_GAP_US : INTER_FRAME_GAP_US);
      } else {
        if (data.data & mask) {
          dst->item(BIT_ONE_HIGH_US, BIT_ONE_LOW_US);
        } else {
          dst->item(BIT_ZERO_HIGH_US, BIT_ZERO_LOW_US);
        }
      }
    }
  }
}
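The footer gap works out to 4 * (460 + 1260) = 6880 us. A minimal Python rendering of the same MSB-first bit-to-duration mapping, as an editor's illustration only (the component itself is the C++ above):

# Editor's sketch of the Symphony encoder's bit mapping.
BIT_ZERO = (460, 1260)  # mark, space in microseconds
BIT_ONE = (1260, 460)
FOOTER_GAP_US = 4 * (460 + 1260)  # 6880 us


def encode_symphony_frame(data: int, nbits: int) -> list[int]:
    """Return alternating mark/space durations for one frame."""
    durations = []
    for bit in range(nbits - 1, -1, -1):  # MSB first
        mark, space = BIT_ONE if (data >> bit) & 1 else BIT_ZERO
        durations += [mark, space]
    durations[-1] = FOOTER_GAP_US  # fold the footer gap into the last space
    return durations


frame = encode_symphony_frame(0xD82, 12)
assert len(frame) == 24 and frame[-1] == 6880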
optional<SymphonyData> SymphonyProtocol::decode(RemoteReceiveData src) {
  auto is_valid_len = [](uint8_t nbits) -> bool { return nbits == 8 || nbits == 12 || nbits == 16; };

  RemoteReceiveData s = src;  // copy
  SymphonyData out{0, 0, 1};

  for (; out.nbits < 32; out.nbits++) {
    if (s.expect_mark(BIT_ONE_HIGH_US)) {
      if (!s.expect_space(BIT_ONE_LOW_US)) {
        // Allow footer gap immediately after the last mark
        if (s.peek_space_at_least(FOOTER_GAP_US)) {
          uint8_t bits_with_this = out.nbits + 1;
          if (is_valid_len(bits_with_this)) {
            out.data = (out.data << 1UL) | 1UL;
            out.nbits = bits_with_this;
            return out;
          }
        }
        return {};
      }
      // Successfully consumed a '1' bit (mark + space)
      out.data = (out.data << 1UL) | 1UL;
      continue;
    } else if (s.expect_mark(BIT_ZERO_HIGH_US)) {
      if (!s.expect_space(BIT_ZERO_LOW_US)) {
        // Allow footer gap immediately after the last mark
        if (s.peek_space_at_least(FOOTER_GAP_US)) {
          uint8_t bits_with_this = out.nbits + 1;
          if (is_valid_len(bits_with_this)) {
            out.data = (out.data << 1UL) | 0UL;
            out.nbits = bits_with_this;
            return out;
          }
        }
        return {};
      }
      // Successfully consumed a '0' bit (mark + space)
      out.data = (out.data << 1UL) | 0UL;
      continue;
    } else {
      // Completed a valid-length frame followed by a footer gap
      if (is_valid_len(out.nbits) && s.peek_space_at_least(FOOTER_GAP_US)) {
        return out;
      }
      return {};
    }
  }

  if (is_valid_len(out.nbits) && s.peek_space_at_least(FOOTER_GAP_US)) {
    return out;
  }

  return {};
}

void SymphonyProtocol::dump(const SymphonyData &data) {
  const int32_t hex_width = (data.nbits + 3) / 4;  // pad to nibble width
  ESP_LOGI(TAG, "Received Symphony: data=0x%0*X, nbits=%d", hex_width, (uint32_t) data.data, data.nbits);
}

}  // namespace remote_base
}  // namespace esphome
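A quick self-contained check of the bit logic the decoder relies on. The real receiver matches durations within a tolerance window via expect_mark/expect_space and only accepts 8/12/16-bit frames; exact matching and a 4-bit frame are used here purely to keep the sketch short:

# Editor's sketch: recovering (data, nbits) from a mark/space list.
ZERO, ONE = (460, 1260), (1260, 460)


def decode_symphony_frame(durations: list[int]) -> tuple[int, int]:
    data = nbits = 0
    for i in range(0, len(durations), 2):
        mark = durations[i]
        data = (data << 1) | (1 if mark == ONE[0] else 0)
        nbits += 1
    return data, nbits


frame = [1260, 460, 1260, 460, 460, 1260, 1260, 6880]  # 0b1101, footer folded
assert decode_symphony_frame(frame) == (0b1101, 4)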
esphome/components/remote_base/symphony_protocol.h (new file, 44 lines)
@@ -0,0 +1,44 @@
#pragma once

#include "esphome/core/component.h"
#include "remote_base.h"

#include <cinttypes>

namespace esphome {
namespace remote_base {

struct SymphonyData {
  uint32_t data;
  uint8_t nbits;
  uint8_t repeats{1};

  bool operator==(const SymphonyData &rhs) const { return data == rhs.data && nbits == rhs.nbits; }
};

class SymphonyProtocol : public RemoteProtocol<SymphonyData> {
 public:
  void encode(RemoteTransmitData *dst, const SymphonyData &data) override;
  optional<SymphonyData> decode(RemoteReceiveData src) override;
  void dump(const SymphonyData &data) override;
};

DECLARE_REMOTE_PROTOCOL(Symphony)

template<typename... Ts> class SymphonyAction : public RemoteTransmitterActionBase<Ts...> {
 public:
  TEMPLATABLE_VALUE(uint32_t, data)
  TEMPLATABLE_VALUE(uint8_t, nbits)
  TEMPLATABLE_VALUE(uint8_t, repeats)

  void encode(RemoteTransmitData *dst, Ts... x) override {
    SymphonyData data{};
    data.data = this->data_.value(x...);
    data.nbits = this->nbits_.value(x...);
    data.repeats = this->repeats_.value(x...);
    SymphonyProtocol().encode(dst, data);
  }
};

}  // namespace remote_base
}  // namespace esphome
@@ -89,6 +89,7 @@ from esphome.const import (
    DEVICE_CLASS_SPEED,
    DEVICE_CLASS_SULPHUR_DIOXIDE,
    DEVICE_CLASS_TEMPERATURE,
    DEVICE_CLASS_TEMPERATURE_DELTA,
    DEVICE_CLASS_TIMESTAMP,
    DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS,
    DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS_PARTS,

@@ -157,6 +158,7 @@ DEVICE_CLASSES = [
    DEVICE_CLASS_SPEED,
    DEVICE_CLASS_SULPHUR_DIOXIDE,
    DEVICE_CLASS_TEMPERATURE,
    DEVICE_CLASS_TEMPERATURE_DELTA,
    DEVICE_CLASS_TIMESTAMP,
    DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS,
    DEVICE_CLASS_VOLATILE_ORGANIC_COMPOUNDS_PARTS,

@@ -1,7 +1,7 @@
import logging

from esphome import core
from esphome.config_helpers import Extend, Remove, merge_config
from esphome.config_helpers import Extend, Remove, merge_config, merge_dicts_ordered
import esphome.config_validation as cv
from esphome.const import CONF_SUBSTITUTIONS, VALID_SUBSTITUTIONS_CHARACTERS
from esphome.yaml_util import ESPHomeDataBase, ESPLiteralValue, make_data_base

@@ -170,10 +170,10 @@ def do_substitution_pass(config, command_line_substitutions, ignore_missing=Fals
        return

    # Merge substitutions in config, overriding with substitutions coming from command line:
    substitutions = {
        **config.get(CONF_SUBSTITUTIONS, {}),
        **(command_line_substitutions or {}),
    }
    # Use merge_dicts_ordered to preserve OrderedDict type for move_to_end()
    substitutions = merge_dicts_ordered(
        config.get(CONF_SUBSTITUTIONS, {}), command_line_substitutions or {}
    )
    with cv.prepend_path("substitutions"):
        if not isinstance(substitutions, dict):
            raise cv.Invalid(
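The reason for the swap: a {**a, **b} literal always builds a plain dict, which drops OrderedDict-only methods such as move_to_end(). A minimal illustration; merge_dicts_ordered itself lives in config_helpers, and this stand-in only mimics the one property that matters here:

# Editor's sketch: why the dict-literal merge breaks move_to_end().
from collections import OrderedDict

a = OrderedDict(x=1, y=2)
b = {"y": 3}

flat = {**a, **b}  # plain dict: no move_to_end()
assert not hasattr(flat, "move_to_end")

merged = OrderedDict(a)  # stand-in for merge_dicts_ordered(a, b)
merged.update(b)
merged.move_to_end("x")  # works, and b's value won the collision
assert list(merged) == ["y", "x"] and merged["y"] == 3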
@@ -71,9 +71,14 @@ from esphome.const import (
    CONF_VISUAL,
)

CONF_PRESET_CHANGE = "preset_change"
CONF_DEFAULT_PRESET = "default_preset"
CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION = "humidity_control_dehumidify_action"
CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION = "humidity_control_humidify_action"
CONF_HUMIDITY_CONTROL_OFF_ACTION = "humidity_control_off_action"
CONF_HUMIDITY_HYSTERESIS = "humidity_hysteresis"
CONF_ON_BOOT_RESTORE_FROM = "on_boot_restore_from"
CONF_PRESET_CHANGE = "preset_change"
CONF_TARGET_HUMIDITY_CHANGE_ACTION = "target_humidity_change_action"

CODEOWNERS = ["@kbx81"]

@@ -241,6 +246,14 @@ def validate_thermostat(config):
            CONF_MAX_HEATING_RUN_TIME,
            CONF_SUPPLEMENTAL_HEATING_ACTION,
        ],
        CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION: [
            CONF_HUMIDITY_CONTROL_OFF_ACTION,
            CONF_HUMIDITY_SENSOR,
        ],
        CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION: [
            CONF_HUMIDITY_CONTROL_OFF_ACTION,
            CONF_HUMIDITY_SENSOR,
        ],
    }
    for config_trigger, req_triggers in requirements.items():
        for req_trigger in req_triggers:
@@ -338,7 +351,7 @@ def validate_thermostat(config):
    # Warn about using the removed CONF_DEFAULT_MODE and advise users
    if CONF_DEFAULT_MODE in config and config[CONF_DEFAULT_MODE] is not None:
        raise cv.Invalid(
            f"{CONF_DEFAULT_MODE} is no longer valid. Please switch to using presets and specify a {CONF_DEFAULT_PRESET}."
            f"{CONF_DEFAULT_MODE} is no longer valid. Please switch to using presets and specify a {CONF_DEFAULT_PRESET}"
        )

    default_mode = config[CONF_DEFAULT_MODE]
@@ -588,9 +601,24 @@ CONFIG_SCHEMA = cv.All(
            cv.Optional(CONF_SWING_VERTICAL_ACTION): automation.validate_automation(
                single=True
            ),
            cv.Optional(
                CONF_TARGET_HUMIDITY_CHANGE_ACTION
            ): automation.validate_automation(single=True),
            cv.Optional(
                CONF_TARGET_TEMPERATURE_CHANGE_ACTION
            ): automation.validate_automation(single=True),
            cv.Exclusive(
                CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION,
                group_of_exclusion="humidity_control",
            ): automation.validate_automation(single=True),
            cv.Exclusive(
                CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION,
                group_of_exclusion="humidity_control",
            ): automation.validate_automation(single=True),
            cv.Optional(
                CONF_HUMIDITY_CONTROL_OFF_ACTION
            ): automation.validate_automation(single=True),
            cv.Optional(CONF_HUMIDITY_HYSTERESIS, default=1.0): cv.percentage,
            cv.Optional(CONF_DEFAULT_MODE, default=None): cv.valid,
            cv.Optional(CONF_DEFAULT_PRESET): cv.templatable(cv.string),
            cv.Optional(CONF_DEFAULT_TARGET_TEMPERATURE_HIGH): cv.temperature,
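cv.Exclusive above means a configuration may define the dehumidify action or the humidify action, but never both. A standalone stand-in for that rule; the real check lives in esphome.config_validation and this is illustration only:

# Editor's sketch: the mutual exclusion enforced by group_of_exclusion.
def check_humidity_control_exclusion(config: dict) -> None:
    group = [
        k
        for k in (
            "humidity_control_dehumidify_action",
            "humidity_control_humidify_action",
        )
        if k in config
    ]
    if len(group) > 1:
        raise ValueError(f"Only one of {group} may be configured")


check_humidity_control_exclusion({"humidity_control_humidify_action": ...})  # ok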
@@ -882,12 +910,39 @@ async def to_code(config):
            config[CONF_SWING_VERTICAL_ACTION],
        )
        cg.add(var.set_supports_swing_mode_vertical(True))
    if CONF_TARGET_HUMIDITY_CHANGE_ACTION in config:
        await automation.build_automation(
            var.get_humidity_change_trigger(),
            [],
            config[CONF_TARGET_HUMIDITY_CHANGE_ACTION],
        )
    if CONF_TARGET_TEMPERATURE_CHANGE_ACTION in config:
        await automation.build_automation(
            var.get_temperature_change_trigger(),
            [],
            config[CONF_TARGET_TEMPERATURE_CHANGE_ACTION],
        )
    if CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION in config:
        cg.add(var.set_supports_dehumidification(True))
        await automation.build_automation(
            var.get_humidity_control_dehumidify_action_trigger(),
            [],
            config[CONF_HUMIDITY_CONTROL_DEHUMIDIFY_ACTION],
        )
    if CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION in config:
        cg.add(var.set_supports_humidification(True))
        await automation.build_automation(
            var.get_humidity_control_humidify_action_trigger(),
            [],
            config[CONF_HUMIDITY_CONTROL_HUMIDIFY_ACTION],
        )
    if CONF_HUMIDITY_CONTROL_OFF_ACTION in config:
        await automation.build_automation(
            var.get_humidity_control_off_action_trigger(),
            [],
            config[CONF_HUMIDITY_CONTROL_OFF_ACTION],
        )
    cg.add(var.set_humidity_hysteresis(config[CONF_HUMIDITY_HYSTERESIS]))

    if CONF_PRESET in config:
        for preset_config in config[CONF_PRESET]:
@@ -32,6 +32,7 @@ void ThermostatClimate::setup() {
  if (this->humidity_sensor_ != nullptr) {
    this->humidity_sensor_->add_on_state_callback([this](float state) {
      this->current_humidity = state;
      this->switch_to_humidity_control_action_(this->compute_humidity_control_action_());
      this->publish_state();
    });
    this->current_humidity = this->humidity_sensor_->state;

@@ -84,6 +85,8 @@ void ThermostatClimate::refresh() {
  this->switch_to_supplemental_action_(this->compute_supplemental_action_());
  this->switch_to_fan_mode_(this->fan_mode.value(), false);
  this->switch_to_swing_mode_(this->swing_mode, false);
  this->switch_to_humidity_control_action_(this->compute_humidity_control_action_());
  this->check_humidity_change_trigger_();
  this->check_temperature_change_trigger_();
  this->publish_state();
}
@@ -129,6 +132,11 @@ bool ThermostatClimate::hysteresis_valid() {
  return true;
}

bool ThermostatClimate::humidity_hysteresis_valid() {
  return !std::isnan(this->humidity_hysteresis_) && this->humidity_hysteresis_ >= 0.0f &&
         this->humidity_hysteresis_ < 100.0f;
}

bool ThermostatClimate::limit_setpoints_for_heat_cool() {
  return this->mode == climate::CLIMATE_MODE_HEAT_COOL ||
         (this->mode == climate::CLIMATE_MODE_AUTO && this->supports_heat_cool_);

@@ -189,6 +197,16 @@ void ThermostatClimate::validate_target_temperature_high() {
  }
}

void ThermostatClimate::validate_target_humidity() {
  if (std::isnan(this->target_humidity)) {
    this->target_humidity =
        (this->get_traits().get_visual_max_humidity() - this->get_traits().get_visual_min_humidity()) / 2.0f;
  } else {
    this->target_humidity = clamp<float>(this->target_humidity, this->get_traits().get_visual_min_humidity(),
                                         this->get_traits().get_visual_max_humidity());
  }
}

void ThermostatClimate::control(const climate::ClimateCall &call) {
  bool target_temperature_high_changed = false;
@@ -235,15 +253,27 @@ void ThermostatClimate::control(const climate::ClimateCall &call) {
      this->validate_target_temperature();
    }
  }
  if (call.get_target_humidity().has_value()) {
    this->target_humidity = call.get_target_humidity().value();
    this->validate_target_humidity();
  }
  // make any changes happen
  this->refresh();
}

climate::ClimateTraits ThermostatClimate::traits() {
  auto traits = climate::ClimateTraits();
  traits.set_supports_current_temperature(true);
  traits.add_feature_flags(climate::CLIMATE_SUPPORTS_ACTION | climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE);

  if (this->supports_two_points_)
    traits.add_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE);

  if (this->humidity_sensor_ != nullptr)
    traits.set_supports_current_humidity(true);
    traits.add_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY);

  if (this->supports_humidification_ || this->supports_dehumidification_)
    traits.add_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY);

  if (this->supports_auto_)
    traits.add_supported_mode(climate::CLIMATE_MODE_AUTO);
@@ -294,9 +324,6 @@ climate::ClimateTraits ThermostatClimate::traits() {
  for (auto &it : this->custom_preset_config_) {
    traits.add_supported_custom_preset(it.first);
  }

  traits.set_supports_two_point_target_temperature(this->supports_two_points_);
  traits.set_supports_action(true);
  return traits;
}
@@ -421,6 +448,28 @@ climate::ClimateAction ThermostatClimate::compute_supplemental_action_() {
  return target_action;
}

HumidificationAction ThermostatClimate::compute_humidity_control_action_() {
  auto target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
  // if hysteresis value or current_humidity is not valid, we go to OFF
  if (std::isnan(this->current_humidity) || !this->humidity_hysteresis_valid()) {
    return THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
  }

  // ensure set point is valid before computing the action
  this->validate_target_humidity();
  // everything has been validated so we can now safely compute the action
  if (this->dehumidification_required_() && this->humidification_required_()) {
    // this is bad and should never happen, so just stop.
    // target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
  } else if (this->supports_dehumidification_ && this->dehumidification_required_()) {
    target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY;
  } else if (this->supports_humidification_ && this->humidification_required_()) {
    target_action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY;
  }

  return target_action;
}

void ThermostatClimate::switch_to_action_(climate::ClimateAction action, bool publish_state) {
  // setup_complete_ helps us ensure an action is called immediately after boot
  if ((action == this->action) && this->setup_complete_) {
@@ -594,6 +643,44 @@ void ThermostatClimate::trigger_supplemental_action_() {
  }
}

void ThermostatClimate::switch_to_humidity_control_action_(HumidificationAction action) {
  // setup_complete_ helps us ensure an action is called immediately after boot
  if ((action == this->humidification_action_) && this->setup_complete_) {
    // already in target mode
    return;
  }

  Trigger<> *trig = this->humidity_control_off_action_trigger_;
  switch (action) {
    case THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF:
      // trig = this->humidity_control_off_action_trigger_;
      ESP_LOGVV(TAG, "Switching to HUMIDIFICATION_OFF action");
      break;
    case THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY:
      trig = this->humidity_control_dehumidify_action_trigger_;
      ESP_LOGVV(TAG, "Switching to DEHUMIDIFY action");
      break;
    case THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY:
      trig = this->humidity_control_humidify_action_trigger_;
      ESP_LOGVV(TAG, "Switching to HUMIDIFY action");
      break;
    case THERMOSTAT_HUMIDITY_CONTROL_ACTION_NONE:
    default:
      action = THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF;
      // trig = this->humidity_control_off_action_trigger_;
  }

  if (this->prev_humidity_control_trigger_ != nullptr) {
    this->prev_humidity_control_trigger_->stop_action();
    this->prev_humidity_control_trigger_ = nullptr;
  }
  this->humidification_action_ = action;
  this->prev_humidity_control_trigger_ = trig;
  if (trig != nullptr) {
    trig->trigger();
  }
}
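The stop-previous/start-new choreography above is the same pattern the component already uses for its fan, mode, and swing triggers. A stripped-down Python rendering of the invariant (one humidity-control automation running at a time); names here are illustrative, not ESPHome API:

# Editor's sketch: at most one humidity-control automation runs at a time.
class Trigger:
    def __init__(self, name):
        self.name, self.running = name, False

    def trigger(self):
        self.running = True

    def stop_action(self):
        self.running = False


prev = None


def switch_to(trig: Trigger):
    global prev
    if prev is not None:
        prev.stop_action()  # stop whatever was running first
    prev = trig
    trig.trigger()


off, dehumidify = Trigger("off"), Trigger("dehumidify")
switch_to(dehumidify)
switch_to(off)
assert not dehumidify.running and off.running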
void ThermostatClimate::switch_to_fan_mode_(climate::ClimateFanMode fan_mode, bool publish_state) {
  // setup_complete_ helps us ensure an action is called immediately after boot
  if ((fan_mode == this->prev_fan_mode_) && this->setup_complete_) {

@@ -885,6 +972,20 @@ void ThermostatClimate::idle_on_timer_callback_() {
  this->switch_to_supplemental_action_(this->compute_supplemental_action_());
}

void ThermostatClimate::check_humidity_change_trigger_() {
  if ((this->prev_target_humidity_ == this->target_humidity) && this->setup_complete_) {
    return;  // nothing changed, no reason to trigger
  } else {
    // save the new humidity so we can check it again later; the trigger will fire below
    this->prev_target_humidity_ = this->target_humidity;
  }
  // trigger the action
  Trigger<> *trig = this->humidity_change_trigger_;
  if (trig != nullptr) {
    trig->trigger();
  }
}

void ThermostatClimate::check_temperature_change_trigger_() {
  if (this->supports_two_points_) {
    // setup_complete_ helps us ensure an action is called immediately after boot
@@ -994,6 +1095,32 @@ bool ThermostatClimate::supplemental_heating_required_() {
          (this->supplemental_action_ == climate::CLIMATE_ACTION_HEATING));
}

bool ThermostatClimate::dehumidification_required_() {
  if (this->current_humidity > this->target_humidity + this->humidity_hysteresis_) {
    // if the current humidity exceeds the target + hysteresis, dehumidification is required
    return true;
  } else if (this->current_humidity < this->target_humidity - this->humidity_hysteresis_) {
    // if the current humidity is less than the target - hysteresis, dehumidification should stop
    return false;
  }
  // if we get here, the current humidity is between target + hysteresis and target - hysteresis,
  // so the action should not change
  return this->humidification_action_ == THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY;
}

bool ThermostatClimate::humidification_required_() {
  if (this->current_humidity < this->target_humidity - this->humidity_hysteresis_) {
    // if the current humidity is below the target - hysteresis, humidification is required
    return true;
  } else if (this->current_humidity > this->target_humidity + this->humidity_hysteresis_) {
    // if the current humidity is above the target + hysteresis, humidification should stop
    return false;
  }
  // if we get here, the current humidity is between target - hysteresis and target + hysteresis,
  // so the action should not change
  return this->humidification_action_ == THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY;
}
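The two predicates implement a symmetric deadband: outside target +/- hysteresis the answer is forced, inside the band the current action is held. A short Python walk-through of a dehumidify cycle (the numbers are arbitrary; the logic mirrors dehumidification_required_ above):

# Editor's sketch of the deadband: target 50 %, hysteresis 5 %.
def dehumidify_required(current, target, hyst, currently_dehumidifying):
    if current > target + hyst:
        return True  # too humid: must run
    if current < target - hyst:
        return False  # dry enough: must stop
    return currently_dehumidifying  # inside the band: hold state


state = False
for reading in (58, 53, 47, 44, 48):
    state = dehumidify_required(reading, 50, 5, state)
# 58 starts the cycle, 53/47 hold it, 44 stops it, 48 stays off
assert state is False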
void ThermostatClimate::dump_preset_config_(const char *preset_name, const ThermostatClimateTargetTempConfig &config) {
  if (this->supports_heat_) {
    ESP_LOGCONFIG(TAG, "  Default Target Temperature Low: %.1f°C",

@@ -1150,8 +1277,12 @@ ThermostatClimate::ThermostatClimate()
      swing_mode_off_trigger_(new Trigger<>()),
      swing_mode_horizontal_trigger_(new Trigger<>()),
      swing_mode_vertical_trigger_(new Trigger<>()),
      humidity_change_trigger_(new Trigger<>()),
      temperature_change_trigger_(new Trigger<>()),
      preset_change_trigger_(new Trigger<>()) {}
      preset_change_trigger_(new Trigger<>()),
      humidity_control_dehumidify_action_trigger_(new Trigger<>()),
      humidity_control_humidify_action_trigger_(new Trigger<>()),
      humidity_control_off_action_trigger_(new Trigger<>()) {}

void ThermostatClimate::set_default_preset(const std::string &custom_preset) {
  this->default_custom_preset_ = custom_preset;
@@ -1215,6 +1346,9 @@ void ThermostatClimate::set_sensor(sensor::Sensor *sensor) { this->sensor_ = sen
void ThermostatClimate::set_humidity_sensor(sensor::Sensor *humidity_sensor) {
  this->humidity_sensor_ = humidity_sensor;
}
void ThermostatClimate::set_humidity_hysteresis(float humidity_hysteresis) {
  this->humidity_hysteresis_ = std::clamp<float>(humidity_hysteresis, 0.0f, 100.0f);
}
void ThermostatClimate::set_use_startup_delay(bool use_startup_delay) { this->use_startup_delay_ = use_startup_delay; }
void ThermostatClimate::set_supports_heat_cool(bool supports_heat_cool) {
  this->supports_heat_cool_ = supports_heat_cool;

@@ -1282,6 +1416,18 @@ void ThermostatClimate::set_supports_swing_mode_vertical(bool supports_swing_mod
void ThermostatClimate::set_supports_two_points(bool supports_two_points) {
  this->supports_two_points_ = supports_two_points;
}
void ThermostatClimate::set_supports_dehumidification(bool supports_dehumidification) {
  this->supports_dehumidification_ = supports_dehumidification;
  if (supports_dehumidification) {
    this->supports_humidification_ = false;
  }
}
void ThermostatClimate::set_supports_humidification(bool supports_humidification) {
  this->supports_humidification_ = supports_humidification;
  if (supports_humidification) {
    this->supports_dehumidification_ = false;
  }
}

Trigger<> *ThermostatClimate::get_cool_action_trigger() const { return this->cool_action_trigger_; }
Trigger<> *ThermostatClimate::get_supplemental_cool_action_trigger() const {
@@ -1315,8 +1461,18 @@ Trigger<> *ThermostatClimate::get_swing_mode_both_trigger() const { return this-
Trigger<> *ThermostatClimate::get_swing_mode_off_trigger() const { return this->swing_mode_off_trigger_; }
Trigger<> *ThermostatClimate::get_swing_mode_horizontal_trigger() const { return this->swing_mode_horizontal_trigger_; }
Trigger<> *ThermostatClimate::get_swing_mode_vertical_trigger() const { return this->swing_mode_vertical_trigger_; }
Trigger<> *ThermostatClimate::get_humidity_change_trigger() const { return this->humidity_change_trigger_; }
Trigger<> *ThermostatClimate::get_temperature_change_trigger() const { return this->temperature_change_trigger_; }
Trigger<> *ThermostatClimate::get_preset_change_trigger() const { return this->preset_change_trigger_; }
Trigger<> *ThermostatClimate::get_humidity_control_dehumidify_action_trigger() const {
  return this->humidity_control_dehumidify_action_trigger_;
}
Trigger<> *ThermostatClimate::get_humidity_control_humidify_action_trigger() const {
  return this->humidity_control_humidify_action_trigger_;
}
Trigger<> *ThermostatClimate::get_humidity_control_off_action_trigger() const {
  return this->humidity_control_off_action_trigger_;
}

void ThermostatClimate::dump_config() {
  LOG_CLIMATE("", "Thermostat", this);
@@ -1420,7 +1576,12 @@ void ThermostatClimate::dump_config() {
      "    OFF: %s\n"
      "    HORIZONTAL: %s\n"
      "    VERTICAL: %s\n"
      "  Supports TWO SET POINTS: %s",
      "  Supports TWO SET POINTS: %s\n"
      "  Supported Humidity Parameters:\n"
      "    CURRENT: %s\n"
      "    TARGET: %s\n"
      "    DEHUMIDIFICATION: %s\n"
      "    HUMIDIFICATION: %s",
      YESNO(this->supports_fan_mode_on_), YESNO(this->supports_fan_mode_off_),
      YESNO(this->supports_fan_mode_auto_), YESNO(this->supports_fan_mode_low_),
      YESNO(this->supports_fan_mode_medium_), YESNO(this->supports_fan_mode_high_),

@@ -1428,7 +1589,10 @@ void ThermostatClimate::dump_config() {
      YESNO(this->supports_fan_mode_diffuse_), YESNO(this->supports_fan_mode_quiet_),
      YESNO(this->supports_swing_mode_both_), YESNO(this->supports_swing_mode_off_),
      YESNO(this->supports_swing_mode_horizontal_), YESNO(this->supports_swing_mode_vertical_),
      YESNO(this->supports_two_points_));
      YESNO(this->supports_two_points_),
      YESNO(this->get_traits().has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY)),
      YESNO(this->supports_dehumidification_ || this->supports_humidification_),
      YESNO(this->supports_dehumidification_), YESNO(this->supports_humidification_));

  if (!this->preset_config_.empty()) {
    ESP_LOGCONFIG(TAG, "  Supported PRESETS:");
@@ -13,6 +13,13 @@
namespace esphome {
namespace thermostat {

enum HumidificationAction : uint8_t {
  THERMOSTAT_HUMIDITY_CONTROL_ACTION_OFF = 0,
  THERMOSTAT_HUMIDITY_CONTROL_ACTION_DEHUMIDIFY = 1,
  THERMOSTAT_HUMIDITY_CONTROL_ACTION_HUMIDIFY = 2,
  THERMOSTAT_HUMIDITY_CONTROL_ACTION_NONE,
};

enum ThermostatClimateTimerIndex : uint8_t {
  THERMOSTAT_TIMER_COOLING_MAX_RUN_TIME = 0,
  THERMOSTAT_TIMER_COOLING_OFF = 1,

@@ -90,6 +97,7 @@ class ThermostatClimate : public climate::Climate, public Component {
  void set_idle_minimum_time_in_sec(uint32_t time);
  void set_sensor(sensor::Sensor *sensor);
  void set_humidity_sensor(sensor::Sensor *humidity_sensor);
  void set_humidity_hysteresis(float humidity_hysteresis);
  void set_use_startup_delay(bool use_startup_delay);
  void set_supports_auto(bool supports_auto);
  void set_supports_heat_cool(bool supports_heat_cool);

@@ -115,6 +123,8 @@ class ThermostatClimate : public climate::Climate, public Component {
  void set_supports_swing_mode_horizontal(bool supports_swing_mode_horizontal);
  void set_supports_swing_mode_off(bool supports_swing_mode_off);
  void set_supports_swing_mode_vertical(bool supports_swing_mode_vertical);
  void set_supports_dehumidification(bool supports_dehumidification);
  void set_supports_humidification(bool supports_humidification);
  void set_supports_two_points(bool supports_two_points);

  void set_preset_config(climate::ClimatePreset preset, const ThermostatClimateTargetTempConfig &config);

@@ -148,8 +158,12 @@ class ThermostatClimate : public climate::Climate, public Component {
  Trigger<> *get_swing_mode_horizontal_trigger() const;
  Trigger<> *get_swing_mode_off_trigger() const;
  Trigger<> *get_swing_mode_vertical_trigger() const;
  Trigger<> *get_humidity_change_trigger() const;
  Trigger<> *get_temperature_change_trigger() const;
  Trigger<> *get_preset_change_trigger() const;
  Trigger<> *get_humidity_control_dehumidify_action_trigger() const;
  Trigger<> *get_humidity_control_humidify_action_trigger() const;
  Trigger<> *get_humidity_control_off_action_trigger() const;
  /// Get current hysteresis values
  float cool_deadband();
  float cool_overrun();

@@ -166,11 +180,13 @@ class ThermostatClimate : public climate::Climate, public Component {
  climate::ClimateFanMode locked_fan_mode();
  /// Set point and hysteresis validation
  bool hysteresis_valid();           // returns true if valid
  bool humidity_hysteresis_valid();  // returns true if valid
  bool limit_setpoints_for_heat_cool();  // returns true if set points should be further limited within visual range
  void validate_target_temperature();
  void validate_target_temperatures(bool pin_target_temperature_high);
  void validate_target_temperature_low();
  void validate_target_temperature_high();
  void validate_target_humidity();

 protected:
  /// Override control to change settings of the climate device.

@@ -192,11 +208,13 @@ class ThermostatClimate : public climate::Climate, public Component {
  /// Re-compute the required action of this climate controller.
  climate::ClimateAction compute_action_(bool ignore_timers = false);
  climate::ClimateAction compute_supplemental_action_();
  HumidificationAction compute_humidity_control_action_();

  /// Switch the climate device to the given climate action.
  void switch_to_action_(climate::ClimateAction action, bool publish_state = true);
  void switch_to_supplemental_action_(climate::ClimateAction action);
  void trigger_supplemental_action_();
  void switch_to_humidity_control_action_(HumidificationAction action);

  /// Switch the climate device to the given climate fan mode.
  void switch_to_fan_mode_(climate::ClimateFanMode fan_mode, bool publish_state = true);

@@ -207,6 +225,9 @@ class ThermostatClimate : public climate::Climate, public Component {
  /// Switch the climate device to the given climate swing mode.
  void switch_to_swing_mode_(climate::ClimateSwingMode swing_mode, bool publish_state = true);

  /// Check if the humidity change trigger should be called.
  void check_humidity_change_trigger_();

  /// Check if the temperature change trigger should be called.
  void check_temperature_change_trigger_();

@@ -243,6 +264,8 @@ class ThermostatClimate : public climate::Climate, public Component {
  bool heating_required_();
  bool supplemental_cooling_required_();
  bool supplemental_heating_required_();
  bool dehumidification_required_();
  bool humidification_required_();

  void dump_preset_config_(const char *preset_name, const ThermostatClimateTargetTempConfig &config);

@@ -259,6 +282,9 @@ class ThermostatClimate : public climate::Climate, public Component {
  /// The current supplemental action
  climate::ClimateAction supplemental_action_{climate::CLIMATE_ACTION_OFF};

  /// The current humidification action
  HumidificationAction humidification_action_{THERMOSTAT_HUMIDITY_CONTROL_ACTION_NONE};

  /// Default standard preset to use on start up
  climate::ClimatePreset default_preset_{};

@@ -321,6 +347,12 @@ class ThermostatClimate : public climate::Climate, public Component {
  /// A false value means that the controller has no such support.
  bool supports_two_points_{false};

  /// Whether the controller supports dehumidification and/or humidification
  ///
  /// A false value means that the controller has no such support.
  bool supports_dehumidification_{false};
  bool supports_humidification_{false};

  /// Flags indicating if maximum allowable run time was exceeded
  bool cooling_max_runtime_exceeded_{false};
  bool heating_max_runtime_exceeded_{false};

@@ -331,9 +363,10 @@ class ThermostatClimate : public climate::Climate, public Component {
  /// setup_complete_ blocks modifying/resetting the temps immediately after boot
  bool setup_complete_{false};

  /// Store previously-known temperatures
  /// Store previously-known humidity and temperatures
  ///
  /// These are used to determine when the temperature change trigger/action needs to be called
  /// These are used to determine when a temperature/humidity has changed
  float prev_target_humidity_{NAN};
  float prev_target_temperature_{NAN};
  float prev_target_temperature_low_{NAN};
  float prev_target_temperature_high_{NAN};

@@ -347,6 +380,9 @@ class ThermostatClimate : public climate::Climate, public Component {
  float heating_deadband_{0};
  float heating_overrun_{0};

  /// Hysteresis values used for computing humidification action
  float humidity_hysteresis_{0};

  /// Maximum allowable temperature deltas before engaging supplemental cooling/heating actions
  float supplemental_cool_delta_{0};
  float supplemental_heat_delta_{0};

@@ -448,12 +484,24 @@ class ThermostatClimate : public climate::Climate, public Component {
  /// The trigger to call when the controller should switch the swing mode to "vertical".
  Trigger<> *swing_mode_vertical_trigger_{nullptr};

  /// The trigger to call when the target humidity changes.
  Trigger<> *humidity_change_trigger_{nullptr};

  /// The trigger to call when the target temperature(s) change(es).
  Trigger<> *temperature_change_trigger_{nullptr};

  /// The trigger to call when the preset mode changes
  Trigger<> *preset_change_trigger_{nullptr};

  /// The trigger to call when dehumidification is required
  Trigger<> *humidity_control_dehumidify_action_trigger_{nullptr};

  /// The trigger to call when humidification is required
  Trigger<> *humidity_control_humidify_action_trigger_{nullptr};

  /// The trigger to call when (de)humidification should stop
  Trigger<> *humidity_control_off_action_trigger_{nullptr};

  /// A reference to the trigger that was previously active.
  ///
  /// This is so that the previous trigger can be stopped before enabling a new one

@@ -462,6 +510,7 @@ class ThermostatClimate : public climate::Climate, public Component {
  Trigger<> *prev_fan_mode_trigger_{nullptr};
  Trigger<> *prev_mode_trigger_{nullptr};
  Trigger<> *prev_swing_mode_trigger_{nullptr};
  Trigger<> *prev_humidity_control_trigger_{nullptr};

  /// Default custom preset to use on start up
  std::string default_custom_preset_{};
@@ -14,6 +14,7 @@ MODELS = {
    "GENERIC": Model.MODEL_GENERIC,
    "RAC-PT1411HWRU-C": Model.MODEL_RAC_PT1411HWRU_C,
    "RAC-PT1411HWRU-F": Model.MODEL_RAC_PT1411HWRU_F,
    "RAS-2819T": Model.MODEL_RAS_2819T,
}

CONFIG_SCHEMA = climate_ir.climate_ir_with_receiver_schema(ToshibaClimate).extend(
@@ -1,4 +1,5 @@
#include "toshiba.h"
#include "esphome/components/remote_base/toshiba_ac_protocol.h"

#include <vector>

@@ -97,6 +98,282 @@ const std::vector<uint8_t> RAC_PT1411HWRU_TEMPERATURE_F{0x10, 0x30, 0x00, 0x20,
                                                        0x22, 0x06, 0x26, 0x07, 0x05, 0x25, 0x04, 0x24, 0x0C,
                                                        0x2C, 0x0D, 0x2D, 0x09, 0x08, 0x28, 0x0A, 0x2A, 0x0B};

// RAS-2819T protocol constants
const uint16_t RAS_2819T_HEADER1 = 0xC23D;
const uint8_t RAS_2819T_HEADER2 = 0xD5;
const uint8_t RAS_2819T_MESSAGE_LENGTH = 6;

// RAS-2819T fan speed codes for rc_code_1 (bytes 2-3)
const uint16_t RAS_2819T_FAN_AUTO = 0xBF40;
const uint16_t RAS_2819T_FAN_QUIET = 0xFF00;
const uint16_t RAS_2819T_FAN_LOW = 0x9F60;
const uint16_t RAS_2819T_FAN_MEDIUM = 0x5FA0;
const uint16_t RAS_2819T_FAN_HIGH = 0x3FC0;

// RAS-2819T fan speed codes for rc_code_2 (byte 1)
const uint8_t RAS_2819T_FAN2_AUTO = 0x66;
const uint8_t RAS_2819T_FAN2_QUIET = 0x01;
const uint8_t RAS_2819T_FAN2_LOW = 0x28;
const uint8_t RAS_2819T_FAN2_MEDIUM = 0x3C;
const uint8_t RAS_2819T_FAN2_HIGH = 0x50;

// RAS-2819T second packet suffix bytes for rc_code_2 (bytes 3-5)
// These are fixed patterns, not actual checksums
struct Ras2819tPacketSuffix {
  uint8_t byte3;
  uint8_t byte4;
  uint8_t byte5;
};
const Ras2819tPacketSuffix RAS_2819T_SUFFIX_AUTO{0x00, 0x02, 0x3D};
const Ras2819tPacketSuffix RAS_2819T_SUFFIX_QUIET{0x00, 0x02, 0xD8};
const Ras2819tPacketSuffix RAS_2819T_SUFFIX_LOW{0x00, 0x02, 0xFF};
const Ras2819tPacketSuffix RAS_2819T_SUFFIX_MEDIUM{0x00, 0x02, 0x13};
const Ras2819tPacketSuffix RAS_2819T_SUFFIX_HIGH{0x00, 0x02, 0x27};
// RAS-2819T swing toggle command
const uint64_t RAS_2819T_SWING_TOGGLE = 0xC23D6B94E01F;

// RAS-2819T single-packet commands
const uint64_t RAS_2819T_POWER_OFF_COMMAND = 0xC23D7B84E01F;

// RAS-2819T known valid command patterns for validation
const std::array<uint64_t, 2> RAS_2819T_VALID_SINGLE_COMMANDS = {
    RAS_2819T_POWER_OFF_COMMAND,  // Power off
    RAS_2819T_SWING_TOGGLE,       // Swing toggle
};

const uint16_t RAS_2819T_VALID_HEADER1 = 0xC23D;
const uint8_t RAS_2819T_VALID_HEADER2 = 0xD5;

const uint8_t RAS_2819T_DRY_BYTE2 = 0x1F;
const uint8_t RAS_2819T_DRY_BYTE3 = 0xE0;
const uint8_t RAS_2819T_DRY_TEMP_OFFSET = 0x24;

const uint8_t RAS_2819T_AUTO_BYTE2 = 0x1F;
const uint8_t RAS_2819T_AUTO_BYTE3 = 0xE0;
const uint8_t RAS_2819T_AUTO_TEMP_OFFSET = 0x08;

const uint8_t RAS_2819T_FAN_ONLY_TEMP = 0xE4;
const uint8_t RAS_2819T_FAN_ONLY_TEMP_INV = 0x1B;

const uint8_t RAS_2819T_HEAT_TEMP_OFFSET = 0x0C;

// RAS-2819T second packet fixed values
const uint8_t RAS_2819T_AUTO_DRY_FAN_BYTE = 0x65;
const uint8_t RAS_2819T_AUTO_DRY_SUFFIX = 0x3A;
const uint8_t RAS_2819T_HEAT_SUFFIX = 0x3B;

// RAS-2819T temperature codes for 18-30°C
static const uint8_t RAS_2819T_TEMP_CODES[] = {
    0x10,  // 18°C
    0x30,  // 19°C
    0x20,  // 20°C
    0x60,  // 21°C
    0x70,  // 22°C
    0x50,  // 23°C
    0x40,  // 24°C
    0xC0,  // 25°C
    0xD0,  // 26°C
    0x90,  // 27°C
    0x80,  // 28°C
    0xA0,  // 29°C
    0xB0   // 30°C
};
// Helper functions for RAS-2819T protocol
//
// ===== RAS-2819T PROTOCOL DOCUMENTATION =====
//
// The RAS-2819T uses a two-packet IR protocol with some exceptions for simple commands.
//
// PACKET STRUCTURE:
// All packets are 6 bytes (48 bits) transmitted with standard Toshiba timing.
//
// TWO-PACKET COMMANDS (Mode/Temperature/Fan changes):
//
// First Packet (rc_code_1): [C2 3D] [FAN_HI FAN_LO] [TEMP] [~TEMP]
//   Byte 0-1: Header (always 0xC23D)
//   Byte 2-3: Fan speed encoding (varies by mode, see fan tables below)
//   Byte 4:   Temperature + mode encoding
//   Byte 5:   Bitwise complement of temperature byte
//
// Second Packet (rc_code_2): [D5] [FAN2] [00] [SUF1] [SUF2] [SUF3]
//   Byte 0:   Header (always 0xD5)
//   Byte 1:   Fan speed secondary encoding
//   Byte 2:   Always 0x00
//   Byte 3-5: Fixed suffix pattern (depends on fan speed and mode)
//
// TEMPERATURE ENCODING:
// Base temp codes: 18°C=0x10, 19°C=0x30, 20°C=0x20, 21°C=0x60, 22°C=0x70,
//                  23°C=0x50, 24°C=0x40, 25°C=0xC0, 26°C=0xD0, 27°C=0x90,
//                  28°C=0x80, 29°C=0xA0, 30°C=0xB0
// Mode offsets added to base temp:
//   COOL: No offset
//   HEAT: +0x0C (e.g., 24°C heat = 0x40 | 0x0C = 0x4C)
//   AUTO: +0x08 (e.g., 24°C auto = 0x40 | 0x08 = 0x48)
//   DRY:  +0x24 (e.g., 24°C dry = 0x40 | 0x24 = 0x64)
//
// FAN SPEED ENCODING (First packet bytes 2-3):
//   AUTO: 0xBF40, QUIET: 0xFF00, LOW: 0x9F60, MEDIUM: 0x5FA0, HIGH: 0x3FC0
//   Special cases: AUTO/DRY modes use 0x1FE0 instead
//
// SINGLE-PACKET COMMANDS:
//   Power Off:    0xC23D7B84E01F (6 bytes, no second packet)
//   Swing Toggle: 0xC23D6B94E01F (6 bytes, no second packet)
//
// MODE DETECTION (from first packet):
//   - Check bytes 2-3: if 0x7B84 → OFF mode
//   - Check bytes 2-3: if 0x1FE0 → AUTO/DRY/low-temp-COOL (distinguish by temp code)
//   - Otherwise: COOL/HEAT/FAN_ONLY (distinguish by temp code and byte 5)
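A worked example of the first-packet layout documented above: 24°C COOL with fan HIGH assembles to C2 3D 3F C0 40 BF, where the last byte is the bitwise complement of the temperature byte. The sketch below is an editor's illustration in Python, not the component code:

# Editor's sketch: building rc_code_1 from the encoding table above.
HEADER1 = 0xC23D
FAN_HIGH = 0x3FC0
TEMP_24C = 0x40  # COOL adds no mode offset


def build_rc_code_1(fan_code: int, temp_code: int) -> bytes:
    temp_complement = ~temp_code & 0xFF  # byte 5 complements byte 4
    return bytes(
        [HEADER1 >> 8, HEADER1 & 0xFF, fan_code >> 8, fan_code & 0xFF, temp_code, temp_complement]
    )


assert build_rc_code_1(FAN_HIGH, TEMP_24C).hex() == "c23d3fc040bf"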
/**
 * Get fan speed encoding for RAS-2819T first packet (rc_code_1, bytes 2-3)
 */
static uint16_t get_ras_2819t_fan_code(climate::ClimateFanMode fan_mode) {
  switch (fan_mode) {
    case climate::CLIMATE_FAN_QUIET:
      return RAS_2819T_FAN_QUIET;
    case climate::CLIMATE_FAN_LOW:
      return RAS_2819T_FAN_LOW;
    case climate::CLIMATE_FAN_MEDIUM:
      return RAS_2819T_FAN_MEDIUM;
    case climate::CLIMATE_FAN_HIGH:
      return RAS_2819T_FAN_HIGH;
    case climate::CLIMATE_FAN_AUTO:
    default:
      return RAS_2819T_FAN_AUTO;
  }
}

/**
 * Get fan speed encoding for RAS-2819T rc_code_2 packet (second packet)
 */
struct Ras2819tSecondPacketCodes {
  uint8_t fan_byte;
  Ras2819tPacketSuffix suffix;
};

static Ras2819tSecondPacketCodes get_ras_2819t_second_packet_codes(climate::ClimateFanMode fan_mode) {
  switch (fan_mode) {
    case climate::CLIMATE_FAN_QUIET:
      return {RAS_2819T_FAN2_QUIET, RAS_2819T_SUFFIX_QUIET};
    case climate::CLIMATE_FAN_LOW:
      return {RAS_2819T_FAN2_LOW, RAS_2819T_SUFFIX_LOW};
    case climate::CLIMATE_FAN_MEDIUM:
      return {RAS_2819T_FAN2_MEDIUM, RAS_2819T_SUFFIX_MEDIUM};
    case climate::CLIMATE_FAN_HIGH:
      return {RAS_2819T_FAN2_HIGH, RAS_2819T_SUFFIX_HIGH};
    case climate::CLIMATE_FAN_AUTO:
    default:
      return {RAS_2819T_FAN2_AUTO, RAS_2819T_SUFFIX_AUTO};
  }
}

/**
 * Get temperature code for RAS-2819T protocol
 */
static uint8_t get_ras_2819t_temp_code(float temperature) {
  int temp_index = static_cast<int>(temperature) - 18;
  if (temp_index < 0 || temp_index >= static_cast<int>(sizeof(RAS_2819T_TEMP_CODES))) {
    ESP_LOGW(TAG, "Temperature %.1f°C out of range [18-30°C], defaulting to 24°C", temperature);
    return 0x40;  // Default to 24°C
  }

  return RAS_2819T_TEMP_CODES[temp_index];
}

/**
 * Decode temperature from RAS-2819T temp code
 */
static float decode_ras_2819t_temperature(uint8_t temp_code) {
  uint8_t base_temp_code = temp_code & 0xF0;

  // Find the code in the temperature array
  for (size_t temp_index = 0; temp_index < sizeof(RAS_2819T_TEMP_CODES); temp_index++) {
    if (RAS_2819T_TEMP_CODES[temp_index] == base_temp_code) {
      return static_cast<float>(temp_index + 18);  // 18°C is the minimum
    }
  }

  ESP_LOGW(TAG, "Unknown temp code: 0x%02X, defaulting to 24°C", base_temp_code);
  return 24.0f;  // Default to 24°C
}
/**
 * Decode fan speed from RAS-2819T IR codes
 */
static climate::ClimateFanMode decode_ras_2819t_fan_mode(uint16_t fan_code) {
  switch (fan_code) {
    case RAS_2819T_FAN_QUIET:
      return climate::CLIMATE_FAN_QUIET;
    case RAS_2819T_FAN_LOW:
      return climate::CLIMATE_FAN_LOW;
    case RAS_2819T_FAN_MEDIUM:
      return climate::CLIMATE_FAN_MEDIUM;
    case RAS_2819T_FAN_HIGH:
      return climate::CLIMATE_FAN_HIGH;
    case RAS_2819T_FAN_AUTO:
    default:
      return climate::CLIMATE_FAN_AUTO;
  }
}

/**
 * Validate RAS-2819T IR command structure and content
 */
static bool is_valid_ras_2819t_command(uint64_t rc_code_1, uint64_t rc_code_2 = 0) {
  // Check header of first packet
  uint16_t header1 = (rc_code_1 >> 32) & 0xFFFF;
  if (header1 != RAS_2819T_VALID_HEADER1) {
    return false;
  }

  // Single packet commands
  if (rc_code_2 == 0) {
    for (uint64_t valid_cmd : RAS_2819T_VALID_SINGLE_COMMANDS) {
      if (rc_code_1 == valid_cmd) {
        return true;
      }
    }
    // Additional validation for unknown single packets
    return false;
  }

  // Two-packet commands - validate second packet header
  uint8_t header2 = (rc_code_2 >> 40) & 0xFF;
  if (header2 != RAS_2819T_VALID_HEADER2) {
    return false;
  }

  // Validate temperature complement in first packet (byte 4 should be ~byte 5)
  uint8_t temp_byte = (rc_code_1 >> 8) & 0xFF;
  uint8_t temp_complement = rc_code_1 & 0xFF;
  if (temp_byte != static_cast<uint8_t>(~temp_complement)) {
    return false;
  }

  // Validate fan speed combinations make sense
  uint16_t fan_code = (rc_code_1 >> 16) & 0xFFFF;
  uint8_t fan2_byte = (rc_code_2 >> 32) & 0xFF;

  // Check if fan codes are from known valid patterns
  bool valid_fan_combo = false;
  if (fan_code == RAS_2819T_FAN_AUTO && fan2_byte == RAS_2819T_FAN2_AUTO)
    valid_fan_combo = true;
  if (fan_code == RAS_2819T_FAN_QUIET && fan2_byte == RAS_2819T_FAN2_QUIET)
    valid_fan_combo = true;
  if (fan_code == RAS_2819T_FAN_LOW && fan2_byte == RAS_2819T_FAN2_LOW)
    valid_fan_combo = true;
  if (fan_code == RAS_2819T_FAN_MEDIUM && fan2_byte == RAS_2819T_FAN2_MEDIUM)
    valid_fan_combo = true;
  if (fan_code == RAS_2819T_FAN_HIGH && fan2_byte == RAS_2819T_FAN2_HIGH)
    valid_fan_combo = true;
  if (fan_code == 0x1FE0 && fan2_byte == RAS_2819T_AUTO_DRY_FAN_BYTE)
    valid_fan_combo = true;  // AUTO/DRY

  return valid_fan_combo;
}
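The two cheapest validity checks above (header and temperature complement) are easy to verify by hand. An editor's sketch in Python, mirroring only those two checks:

# Editor's sketch: header + complement checks from is_valid_ras_2819t_command.
def looks_like_ras_2819t(rc_code_1: int) -> bool:
    header_ok = (rc_code_1 >> 32) & 0xFFFF == 0xC23D
    temp = (rc_code_1 >> 8) & 0xFF
    complement = rc_code_1 & 0xFF
    return header_ok and temp == (~complement & 0xFF)


assert looks_like_ras_2819t(0xC23D3FC040BF)  # 24°C COOL, fan HIGH
assert not looks_like_ras_2819t(0xC23D3FC040C0)  # complement mismatch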
void ToshibaClimate::setup() {
  if (this->sensor_) {
    this->sensor_->add_on_state_callback([this](float state) {

@@ -126,16 +403,43 @@ void ToshibaClimate::setup() {
  this->minimum_temperature_ = this->temperature_min_();
  this->maximum_temperature_ = this->temperature_max_();
  this->swing_modes_ = this->toshiba_swing_modes_();

  // Ensure swing mode is always initialized to a valid value
  if (this->swing_modes_.empty() || this->swing_modes_.find(this->swing_mode) == this->swing_modes_.end()) {
    // No swing support for this model or current swing mode not supported, reset to OFF
    this->swing_mode = climate::CLIMATE_SWING_OFF;
  }

  // Ensure mode is valid - ESPHome should only use standard climate modes
  if (this->mode != climate::CLIMATE_MODE_OFF && this->mode != climate::CLIMATE_MODE_HEAT &&
      this->mode != climate::CLIMATE_MODE_COOL && this->mode != climate::CLIMATE_MODE_HEAT_COOL &&
      this->mode != climate::CLIMATE_MODE_DRY && this->mode != climate::CLIMATE_MODE_FAN_ONLY) {
    ESP_LOGW(TAG, "Invalid mode detected during setup, resetting to OFF");
    this->mode = climate::CLIMATE_MODE_OFF;
  }

  // Ensure fan mode is valid
  if (!this->fan_mode.has_value()) {
    ESP_LOGW(TAG, "Fan mode not set during setup, defaulting to AUTO");
    this->fan_mode = climate::CLIMATE_FAN_AUTO;
  }

  // Never send nan to HA
  if (std::isnan(this->target_temperature))
    this->target_temperature = 24;
  // Log final state for debugging HA errors
  ESP_LOGV(TAG, "Setup complete - Mode: %d, Fan: %s, Swing: %d, Temp: %.1f", static_cast<int>(this->mode),
           this->fan_mode.has_value() ? std::to_string(static_cast<int>(this->fan_mode.value())).c_str() : "NONE",
           static_cast<int>(this->swing_mode), this->target_temperature);
}

void ToshibaClimate::transmit_state() {
  if (this->model_ == MODEL_RAC_PT1411HWRU_C || this->model_ == MODEL_RAC_PT1411HWRU_F) {
    transmit_rac_pt1411hwru_();
    this->transmit_rac_pt1411hwru_();
  } else if (this->model_ == MODEL_RAS_2819T) {
    this->transmit_ras_2819t_();
  } else {
    transmit_generic_();
    this->transmit_generic_();
  }
}
@@ -230,7 +534,7 @@ void ToshibaClimate::transmit_generic_() {
  auto transmit = this->transmitter_->transmit();
  auto *data = transmit.get_data();

  encode_(data, message, message_length, 1);
  this->encode_(data, message, message_length, 1);

  transmit.perform();
}

@@ -348,15 +652,12 @@ void ToshibaClimate::transmit_rac_pt1411hwru_() {
      message[11] += message[index];
    }
  }
  ESP_LOGV(TAG, "*** Generated codes: 0x%.2X%.2X%.2X%.2X%.2X%.2X 0x%.2X%.2X%.2X%.2X%.2X%.2X", message[0], message[1],
           message[2], message[3], message[4], message[5], message[6], message[7], message[8], message[9], message[10],
           message[11]);

  // load first block of IR code and repeat it once
  encode_(data, &message[0], RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
  this->encode_(data, &message[0], RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
  // load second block of IR code, if present
  if (message[6] != 0) {
    encode_(data, &message[6], RAC_PT1411HWRU_MESSAGE_LENGTH, 0);
    this->encode_(data, &message[6], RAC_PT1411HWRU_MESSAGE_LENGTH, 0);
  }

  transmit.perform();
@@ -366,19 +667,19 @@ void ToshibaClimate::transmit_rac_pt1411hwru_() {
  data->space(TOSHIBA_PACKET_SPACE);
  switch (this->swing_mode) {
    case climate::CLIMATE_SWING_VERTICAL:
      encode_(data, &RAC_PT1411HWRU_SWING_VERTICAL[0], RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
      this->encode_(data, &RAC_PT1411HWRU_SWING_VERTICAL[0], RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
      break;

    case climate::CLIMATE_SWING_OFF:
    default:
      encode_(data, &RAC_PT1411HWRU_SWING_OFF[0], RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
      this->encode_(data, &RAC_PT1411HWRU_SWING_OFF[0], RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
  }

  data->space(TOSHIBA_PACKET_SPACE);
  transmit.perform();

  if (this->sensor_) {
    transmit_rac_pt1411hwru_temp_(true, false);
    this->transmit_rac_pt1411hwru_temp_(true, false);
  }
}
@@ -430,15 +731,217 @@ void ToshibaClimate::transmit_rac_pt1411hwru_temp_(const bool cs_state, const bo
|
||||
// Byte 5: Footer lower/bitwise complement of byte 4
|
||||
message[5] = ~message[4];
|
||||
|
||||
ESP_LOGV(TAG, "*** Generated code: 0x%.2X%.2X%.2X%.2X%.2X%.2X", message[0], message[1], message[2], message[3],
|
||||
message[4], message[5]);
|
||||
// load IR code and repeat it once
|
||||
encode_(data, message, RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
|
||||
this->encode_(data, message, RAC_PT1411HWRU_MESSAGE_LENGTH, 1);
|
||||
|
||||
transmit.perform();
|
||||
}
|
||||
}
|
||||
|
||||
void ToshibaClimate::transmit_ras_2819t_() {
|
||||
// Handle swing mode transmission for RAS-2819T
|
||||
// Note: RAS-2819T uses a toggle command, so we need to track state changes
|
||||
|
||||
// Check if ONLY swing mode changed (and no other climate parameters)
|
||||
bool swing_changed = (this->swing_mode != this->last_swing_mode_);
|
||||
bool mode_changed = (this->mode != this->last_mode_);
|
||||
bool fan_changed = (this->fan_mode != this->last_fan_mode_);
|
||||
bool temp_changed = (abs(this->target_temperature - this->last_target_temperature_) > 0.1f);
|
||||
|
||||
bool only_swing_changed = swing_changed && !mode_changed && !fan_changed && !temp_changed;
|
||||
|
||||
if (only_swing_changed) {
|
||||
// Send ONLY swing toggle command (like the physical remote does)
|
||||
auto swing_transmit = this->transmitter_->transmit();
|
||||
auto *swing_data = swing_transmit.get_data();
|
||||
|
||||
// Convert toggle command to bytes for transmission
|
||||
uint8_t swing_message[RAS_2819T_MESSAGE_LENGTH];
|
||||
swing_message[0] = (RAS_2819T_SWING_TOGGLE >> 40) & 0xFF;
|
||||
swing_message[1] = (RAS_2819T_SWING_TOGGLE >> 32) & 0xFF;
|
||||
swing_message[2] = (RAS_2819T_SWING_TOGGLE >> 24) & 0xFF;
|
||||
swing_message[3] = (RAS_2819T_SWING_TOGGLE >> 16) & 0xFF;
|
||||
swing_message[4] = (RAS_2819T_SWING_TOGGLE >> 8) & 0xFF;
|
||||
swing_message[5] = RAS_2819T_SWING_TOGGLE & 0xFF;
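    // The 48-bit toggle constant is split big-endian into six bytes; e.g. a
    // hypothetical RAS_2819T_SWING_TOGGLE of 0x0102030405FF would give
    // swing_message = {0x01, 0x02, 0x03, 0x04, 0x05, 0xFF}.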

    // Use single packet transmission WITH repeat (like regular commands)
    this->encode_(swing_data, swing_message, RAS_2819T_MESSAGE_LENGTH, 1);
    swing_transmit.perform();

    // Update all state tracking
    this->last_swing_mode_ = this->swing_mode;
    this->last_mode_ = this->mode;
    this->last_fan_mode_ = this->fan_mode;
    this->last_target_temperature_ = this->target_temperature;

    // Immediately publish the state change to Home Assistant
    this->publish_state();

    return;  // Exit early - don't send climate command
  }

  // If we get here, send the regular climate command (temperature/mode/fan)
  uint8_t message1[RAS_2819T_MESSAGE_LENGTH] = {0};
  uint8_t message2[RAS_2819T_MESSAGE_LENGTH] = {0};
  float temperature =
      clamp<float>(this->target_temperature, TOSHIBA_RAS_2819T_TEMP_C_MIN, TOSHIBA_RAS_2819T_TEMP_C_MAX);

  // Build first packet (RAS_2819T_HEADER1 + 4 bytes)
  message1[0] = (RAS_2819T_HEADER1 >> 8) & 0xFF;
  message1[1] = RAS_2819T_HEADER1 & 0xFF;

  // Handle OFF mode
  if (this->mode == climate::CLIMATE_MODE_OFF) {
    // Extract bytes from power off command constant
    message1[2] = (RAS_2819T_POWER_OFF_COMMAND >> 24) & 0xFF;
    message1[3] = (RAS_2819T_POWER_OFF_COMMAND >> 16) & 0xFF;
    message1[4] = (RAS_2819T_POWER_OFF_COMMAND >> 8) & 0xFF;
    message1[5] = RAS_2819T_POWER_OFF_COMMAND & 0xFF;
    // No second packet for OFF
  } else {
    // Get temperature and fan encoding
    uint8_t temp_code = get_ras_2819t_temp_code(temperature);

    // Get fan speed encoding for rc_code_1
    climate::ClimateFanMode effective_fan_mode = this->fan_mode.value();

    // Dry mode only supports AUTO fan speed
    if (this->mode == climate::CLIMATE_MODE_DRY) {
      effective_fan_mode = climate::CLIMATE_FAN_AUTO;
      if (this->fan_mode.value() != climate::CLIMATE_FAN_AUTO) {
        ESP_LOGW(TAG, "Dry mode only supports AUTO fan speed, forcing AUTO");
      }
    }

    uint16_t fan_code = get_ras_2819t_fan_code(effective_fan_mode);

    // Mode and temperature encoding
    switch (this->mode) {
      case climate::CLIMATE_MODE_COOL:
        // All cooling temperatures support fan speed control
        message1[2] = (fan_code >> 8) & 0xFF;
        message1[3] = fan_code & 0xFF;
        message1[4] = temp_code;
        message1[5] = ~temp_code;
        break;

      case climate::CLIMATE_MODE_HEAT:
        // Heating supports fan speed control
        message1[2] = (fan_code >> 8) & 0xFF;
        message1[3] = fan_code & 0xFF;
        // Heat mode adds offset to temperature code
        message1[4] = temp_code | RAS_2819T_HEAT_TEMP_OFFSET;
        message1[5] = ~(temp_code | RAS_2819T_HEAT_TEMP_OFFSET);
        break;

      case climate::CLIMATE_MODE_HEAT_COOL:
        // Auto mode uses fixed encoding
        message1[2] = RAS_2819T_AUTO_BYTE2;
        message1[3] = RAS_2819T_AUTO_BYTE3;
        message1[4] = temp_code | RAS_2819T_AUTO_TEMP_OFFSET;
        message1[5] = ~(temp_code | RAS_2819T_AUTO_TEMP_OFFSET);
        break;

      case climate::CLIMATE_MODE_DRY:
        // Dry mode uses fixed encoding and forces AUTO fan
        message1[2] = RAS_2819T_DRY_BYTE2;
        message1[3] = RAS_2819T_DRY_BYTE3;
        message1[4] = temp_code | RAS_2819T_DRY_TEMP_OFFSET;
        message1[5] = ~message1[4];
        break;

      case climate::CLIMATE_MODE_FAN_ONLY:
        // Fan only mode supports fan speed control
        message1[2] = (fan_code >> 8) & 0xFF;
        message1[3] = fan_code & 0xFF;
        message1[4] = RAS_2819T_FAN_ONLY_TEMP;
        message1[5] = RAS_2819T_FAN_ONLY_TEMP_INV;
        break;

      default:
        // Default case supports fan speed control
        message1[2] = (fan_code >> 8) & 0xFF;
        message1[3] = fan_code & 0xFF;
        message1[4] = temp_code;
        message1[5] = ~temp_code;
        break;
    }

    // Build second packet (RAS_2819T_HEADER2 + 4 bytes)
    message2[0] = RAS_2819T_HEADER2;

    // Get fan speed encoding for rc_code_2
    Ras2819tSecondPacketCodes second_packet_codes = get_ras_2819t_second_packet_codes(effective_fan_mode);

    // Determine header byte 2 and fan encoding based on mode
    switch (this->mode) {
      case climate::CLIMATE_MODE_COOL:
        message2[1] = second_packet_codes.fan_byte;
        message2[2] = 0x00;
        message2[3] = second_packet_codes.suffix.byte3;
        message2[4] = second_packet_codes.suffix.byte4;
        message2[5] = second_packet_codes.suffix.byte5;
        break;

      case climate::CLIMATE_MODE_HEAT:
        message2[1] = second_packet_codes.fan_byte;
        message2[2] = 0x00;
        message2[3] = second_packet_codes.suffix.byte3;
        message2[4] = 0x00;
        message2[5] = RAS_2819T_HEAT_SUFFIX;
        break;

      case climate::CLIMATE_MODE_HEAT_COOL:
      case climate::CLIMATE_MODE_DRY:
        // Auto/Dry modes use fixed values regardless of fan setting
        message2[1] = RAS_2819T_AUTO_DRY_FAN_BYTE;
        message2[2] = 0x00;
        message2[3] = 0x00;
        message2[4] = 0x00;
        message2[5] = RAS_2819T_AUTO_DRY_SUFFIX;
        break;

      case climate::CLIMATE_MODE_FAN_ONLY:
        message2[1] = second_packet_codes.fan_byte;
        message2[2] = 0x00;
        message2[3] = second_packet_codes.suffix.byte3;
        message2[4] = 0x00;
        message2[5] = RAS_2819T_HEAT_SUFFIX;
        break;

      default:
        message2[1] = second_packet_codes.fan_byte;
        message2[2] = 0x00;
        message2[3] = second_packet_codes.suffix.byte3;
        message2[4] = second_packet_codes.suffix.byte4;
        message2[5] = second_packet_codes.suffix.byte5;
        break;
    }
  }

  // Log final messages being transmitted

  // Transmit using proper Toshiba protocol timing
  auto transmit = this->transmitter_->transmit();
  auto *data = transmit.get_data();

  // Use existing Toshiba encode function for proper timing
  this->encode_(data, message1, RAS_2819T_MESSAGE_LENGTH, 1);

  if (this->mode != climate::CLIMATE_MODE_OFF) {
    // Send second packet with gap
    this->encode_(data, message2, RAS_2819T_MESSAGE_LENGTH, 0);
  }

  transmit.perform();

  // Update all state tracking after successful transmission
  this->last_swing_mode_ = this->swing_mode;
  this->last_mode_ = this->mode;
  this->last_fan_mode_ = this->fan_mode;
  this->last_target_temperature_ = this->target_temperature;
}

uint8_t ToshibaClimate::is_valid_rac_pt1411hwru_header_(const uint8_t *message) {
  const std::vector<uint8_t> header{RAC_PT1411HWRU_MESSAGE_HEADER0, RAC_PT1411HWRU_CS_HEADER,
                                    RAC_PT1411HWRU_SWING_HEADER};
@@ -464,11 +967,11 @@ bool ToshibaClimate::compare_rac_pt1411hwru_packets_(const uint8_t *message1, co
bool ToshibaClimate::is_valid_rac_pt1411hwru_message_(const uint8_t *message) {
  uint8_t checksum = 0;

  switch (is_valid_rac_pt1411hwru_header_(message)) {
  switch (this->is_valid_rac_pt1411hwru_header_(message)) {
    case RAC_PT1411HWRU_MESSAGE_HEADER0:
    case RAC_PT1411HWRU_CS_HEADER:
    case RAC_PT1411HWRU_SWING_HEADER:
      if (is_valid_rac_pt1411hwru_header_(message) && (message[2] == static_cast<uint8_t>(~message[3])) &&
      if (this->is_valid_rac_pt1411hwru_header_(message) && (message[2] == static_cast<uint8_t>(~message[3])) &&
          (message[4] == static_cast<uint8_t>(~message[5]))) {
        return true;
      }
@@ -490,7 +993,103 @@ bool ToshibaClimate::is_valid_rac_pt1411hwru_message_(const uint8_t *message) {
  return false;
}

bool ToshibaClimate::process_ras_2819t_command_(const remote_base::ToshibaAcData &toshiba_data) {
  // Check for power-off command (single packet)
  if (toshiba_data.rc_code_2 == 0 && toshiba_data.rc_code_1 == RAS_2819T_POWER_OFF_COMMAND) {
    this->mode = climate::CLIMATE_MODE_OFF;
    ESP_LOGI(TAG, "Mode: OFF");
    this->publish_state();
    return true;
  }

  // Check for swing toggle command (single packet)
  if (toshiba_data.rc_code_2 == 0 && toshiba_data.rc_code_1 == RAS_2819T_SWING_TOGGLE) {
    // Toggle swing mode
    if (this->swing_mode == climate::CLIMATE_SWING_VERTICAL) {
      this->swing_mode = climate::CLIMATE_SWING_OFF;
      ESP_LOGI(TAG, "Swing: OFF");
    } else {
      this->swing_mode = climate::CLIMATE_SWING_VERTICAL;
      ESP_LOGI(TAG, "Swing: VERTICAL");
    }
    this->publish_state();
    return true;
  }

  // Handle regular two-packet commands (mode/temperature/fan changes)
  if (toshiba_data.rc_code_2 != 0) {
    // Convert to byte array for easier processing
    uint8_t message1[6], message2[6];
    for (uint8_t i = 0; i < 6; i++) {
      message1[i] = (toshiba_data.rc_code_1 >> (40 - i * 8)) & 0xFF;
      message2[i] = (toshiba_data.rc_code_2 >> (40 - i * 8)) & 0xFF;
    }
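    // The shift (40 - i * 8) walks from the most significant byte downward; a
    // hypothetical rc_code_1 of 0x000B1FE09876 unpacks to
    // message1 = {0x00, 0x0B, 0x1F, 0xE0, 0x98, 0x76}.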

    // Decode the protocol using message1 (rc_code_1)
    uint8_t temp_code = message1[4];

    // Decode mode - check bytes 2-3 pattern and temperature code
    if ((message1[2] == 0x7B) && (message1[3] == 0x84)) {
      // OFF mode has specific pattern
      this->mode = climate::CLIMATE_MODE_OFF;
      ESP_LOGI(TAG, "Mode: OFF");
    } else if ((message1[2] == 0x1F) && (message1[3] == 0xE0)) {
      // 0x1FE0 pattern is used for AUTO, DRY, and low-temp COOL
      if ((temp_code & 0x0F) == 0x08) {
        this->mode = climate::CLIMATE_MODE_HEAT_COOL;
        ESP_LOGI(TAG, "Mode: AUTO");
      } else if ((temp_code & 0x0F) == 0x04) {
        this->mode = climate::CLIMATE_MODE_DRY;
        ESP_LOGI(TAG, "Mode: DRY");
      } else {
        this->mode = climate::CLIMATE_MODE_COOL;
        ESP_LOGI(TAG, "Mode: COOL (low temp)");
      }
    } else {
      // Variable fan speed patterns - decode by temperature code
      if ((temp_code & 0x0F) == 0x0C) {
        this->mode = climate::CLIMATE_MODE_HEAT;
        ESP_LOGI(TAG, "Mode: HEAT");
      } else if (message1[5] == 0x1B) {
        this->mode = climate::CLIMATE_MODE_FAN_ONLY;
        ESP_LOGI(TAG, "Mode: FAN_ONLY");
      } else {
        this->mode = climate::CLIMATE_MODE_COOL;
        ESP_LOGI(TAG, "Mode: COOL");
      }
    }

    // Decode fan speed from rc_code_1
    uint16_t fan_code = (message1[2] << 8) | message1[3];
    this->fan_mode = decode_ras_2819t_fan_mode(fan_code);

    // Decode temperature
    if (this->mode != climate::CLIMATE_MODE_OFF && this->mode != climate::CLIMATE_MODE_FAN_ONLY) {
      this->target_temperature = decode_ras_2819t_temperature(temp_code);
    }

    this->publish_state();
    return true;
  } else {
    ESP_LOGD(TAG, "Unknown single-packet RAS-2819T command: 0x%" PRIX64, toshiba_data.rc_code_1);
    return false;
  }
}

bool ToshibaClimate::on_receive(remote_base::RemoteReceiveData data) {
  // Try modern ToshibaAcProtocol decoder first (handles RAS-2819T and potentially others)
  remote_base::ToshibaAcProtocol toshiba_protocol;
  auto decode_result = toshiba_protocol.decode(data);

  if (decode_result.has_value()) {
    auto toshiba_data = decode_result.value();
    // Validate and process RAS-2819T commands
    if (is_valid_ras_2819t_command(toshiba_data.rc_code_1, toshiba_data.rc_code_2)) {
      return this->process_ras_2819t_command_(toshiba_data);
    }
  }

  // Fall back to generic processing for older protocols
  uint8_t message[18] = {0};
  uint8_t message_length = TOSHIBA_HEADER_LENGTH, temperature_code = 0;

@@ -499,11 +1098,11 @@ bool ToshibaClimate::on_receive(remote_base::RemoteReceiveData data) {
    return false;
  }
  // Read incoming bits into buffer
  if (!decode_(&data, message, message_length)) {
  if (!this->decode_(&data, message, message_length)) {
    return false;
  }
  // Determine incoming message protocol version and/or length
  if (is_valid_rac_pt1411hwru_header_(message)) {
  if (this->is_valid_rac_pt1411hwru_header_(message)) {
    // We already received four bytes
    message_length = RAC_PT1411HWRU_MESSAGE_LENGTH - 4;
  } else if ((message[0] ^ message[1] ^ message[2]) != message[3]) {
@@ -514,11 +1113,11 @@ bool ToshibaClimate::on_receive(remote_base::RemoteReceiveData data) {
    message_length = message[2] + 2;
  }
  // Decode the remaining bytes
  if (!decode_(&data, &message[4], message_length)) {
  if (!this->decode_(&data, &message[4], message_length)) {
    return false;
  }
  // If this is a RAC-PT1411HWRU message, we expect the first packet a second time and also possibly a third packet
  if (is_valid_rac_pt1411hwru_header_(message)) {
  if (this->is_valid_rac_pt1411hwru_header_(message)) {
    // There is always a space between packets
    if (!data.expect_item(TOSHIBA_BIT_MARK, TOSHIBA_GAP_SPACE)) {
      return false;
@@ -527,7 +1126,7 @@ bool ToshibaClimate::on_receive(remote_base::RemoteReceiveData data) {
    if (!data.expect_item(TOSHIBA_HEADER_MARK, TOSHIBA_HEADER_SPACE)) {
      return false;
    }
    if (!decode_(&data, &message[6], RAC_PT1411HWRU_MESSAGE_LENGTH)) {
    if (!this->decode_(&data, &message[6], RAC_PT1411HWRU_MESSAGE_LENGTH)) {
      return false;
    }
    // If this is a RAC-PT1411HWRU message, there may also be a third packet.
@@ -535,25 +1134,25 @@ bool ToshibaClimate::on_receive(remote_base::RemoteReceiveData data) {
    if (data.expect_item(TOSHIBA_BIT_MARK, TOSHIBA_GAP_SPACE)) {
      // Validate header 3
      data.expect_item(TOSHIBA_HEADER_MARK, TOSHIBA_HEADER_SPACE);
      if (decode_(&data, &message[12], RAC_PT1411HWRU_MESSAGE_LENGTH)) {
        if (!is_valid_rac_pt1411hwru_message_(&message[12])) {
      if (this->decode_(&data, &message[12], RAC_PT1411HWRU_MESSAGE_LENGTH)) {
        if (!this->is_valid_rac_pt1411hwru_message_(&message[12])) {
          // If a third packet was received but the checksum is not valid, fail
          return false;
        }
      }
    }
    if (!compare_rac_pt1411hwru_packets_(&message[0], &message[6])) {
    if (!this->compare_rac_pt1411hwru_packets_(&message[0], &message[6])) {
      // If the first two packets don't match each other, fail
      return false;
    }
    if (!is_valid_rac_pt1411hwru_message_(&message[0])) {
    if (!this->is_valid_rac_pt1411hwru_message_(&message[0])) {
      // If the first packet isn't valid, fail
      return false;
    }
  }

  // Header has been verified, now determine protocol version and set the climate component properties
  switch (is_valid_rac_pt1411hwru_header_(message)) {
  switch (this->is_valid_rac_pt1411hwru_header_(message)) {
    // Power, temperature, mode, fan speed
    case RAC_PT1411HWRU_MESSAGE_HEADER0:
      // Get the mode
@@ -608,7 +1207,7 @@ bool ToshibaClimate::on_receive(remote_base::RemoteReceiveData data) {
      break;
    }
    // Get the target temperature
    if (is_valid_rac_pt1411hwru_message_(&message[12])) {
    if (this->is_valid_rac_pt1411hwru_message_(&message[12])) {
      temperature_code =
          (message[4] >> 4) | (message[14] & RAC_PT1411HWRU_FLAG_FRAC) | (message[15] & RAC_PT1411HWRU_FLAG_NEG);
      if (message[15] & RAC_PT1411HWRU_FLAG_FAH) {

@@ -1,6 +1,7 @@
#pragma once

#include "esphome/components/climate_ir/climate_ir.h"
#include "esphome/components/remote_base/toshiba_ac_protocol.h"

namespace esphome {
namespace toshiba {
@@ -10,6 +11,7 @@ enum Model {
  MODEL_GENERIC = 0,           // Temperature range is from 17 to 30
  MODEL_RAC_PT1411HWRU_C = 1,  // Temperature range is from 16 to 30
  MODEL_RAC_PT1411HWRU_F = 2,  // Temperature range is from 16 to 30
  MODEL_RAS_2819T = 3,         // RAS-2819T protocol variant, temperature range 18 to 30
};

// Supported temperature ranges
@@ -19,6 +21,8 @@ const float TOSHIBA_RAC_PT1411HWRU_TEMP_C_MIN = 16.0;
const float TOSHIBA_RAC_PT1411HWRU_TEMP_C_MAX = 30.0;
const float TOSHIBA_RAC_PT1411HWRU_TEMP_F_MIN = 60.0;
const float TOSHIBA_RAC_PT1411HWRU_TEMP_F_MAX = 86.0;
const float TOSHIBA_RAS_2819T_TEMP_C_MIN = 18.0;
const float TOSHIBA_RAS_2819T_TEMP_C_MAX = 30.0;

class ToshibaClimate : public climate_ir::ClimateIR {
 public:
@@ -35,6 +39,9 @@ class ToshibaClimate : public climate_ir::ClimateIR {
  void transmit_generic_();
  void transmit_rac_pt1411hwru_();
  void transmit_rac_pt1411hwru_temp_(bool cs_state = true, bool cs_send_update = true);
  void transmit_ras_2819t_();
  // Process RAS-2819T IR command data
  bool process_ras_2819t_command_(const remote_base::ToshibaAcData &toshiba_data);
  // Returns the header if valid, else returns zero
  uint8_t is_valid_rac_pt1411hwru_header_(const uint8_t *message);
  // Returns true if message is a valid RAC-PT1411HWRU IR message, regardless if first or second packet
@@ -43,11 +50,26 @@ class ToshibaClimate : public climate_ir::ClimateIR {
  bool compare_rac_pt1411hwru_packets_(const uint8_t *message1, const uint8_t *message2);
  bool on_receive(remote_base::RemoteReceiveData data) override;

 private:
  // RAS-2819T state tracking for swing mode optimization
  climate::ClimateSwingMode last_swing_mode_{climate::CLIMATE_SWING_OFF};
  climate::ClimateMode last_mode_{climate::CLIMATE_MODE_OFF};
  optional<climate::ClimateFanMode> last_fan_mode_{};
  float last_target_temperature_{24.0f};

  float temperature_min_() {
    return (this->model_ == MODEL_GENERIC) ? TOSHIBA_GENERIC_TEMP_C_MIN : TOSHIBA_RAC_PT1411HWRU_TEMP_C_MIN;
    if (this->model_ == MODEL_RAC_PT1411HWRU_C || this->model_ == MODEL_RAC_PT1411HWRU_F)
      return TOSHIBA_RAC_PT1411HWRU_TEMP_C_MIN;
    if (this->model_ == MODEL_RAS_2819T)
      return TOSHIBA_RAS_2819T_TEMP_C_MIN;
    return TOSHIBA_GENERIC_TEMP_C_MIN;  // Default to GENERIC for unknown models
  }
  float temperature_max_() {
    return (this->model_ == MODEL_GENERIC) ? TOSHIBA_GENERIC_TEMP_C_MAX : TOSHIBA_RAC_PT1411HWRU_TEMP_C_MAX;
    if (this->model_ == MODEL_RAC_PT1411HWRU_C || this->model_ == MODEL_RAC_PT1411HWRU_F)
      return TOSHIBA_RAC_PT1411HWRU_TEMP_C_MAX;
    if (this->model_ == MODEL_RAS_2819T)
      return TOSHIBA_RAS_2819T_TEMP_C_MAX;
    return TOSHIBA_GENERIC_TEMP_C_MAX;  // Default to GENERIC for unknown models
  }
  std::set<climate::ClimateSwingMode> toshiba_swing_modes_() {
    return (this->model_ == MODEL_GENERIC)

@@ -402,8 +402,8 @@ async def to_code(config):
        add_idf_sdkconfig_option("CONFIG_LWIP_DHCPS", False)

    # Disable Enterprise WiFi support if no EAP is configured
    if CORE.is_esp32 and not has_eap:
        add_idf_sdkconfig_option("CONFIG_ESP_WIFI_ENTERPRISE_SUPPORT", False)
    if CORE.is_esp32:
        add_idf_sdkconfig_option("CONFIG_ESP_WIFI_ENTERPRISE_SUPPORT", has_eap)

    cg.add(var.set_reboot_timeout(config[CONF_REBOOT_TIMEOUT]))
    cg.add(var.set_power_save_mode(config[CONF_POWER_SAVE_MODE]))
@@ -3,6 +3,7 @@ from esphome.components import i2c, sensor
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
CONF_ID,
|
||||
CONF_OVERSAMPLING,
|
||||
CONF_PRESSURE,
|
||||
CONF_TEMPERATURE,
|
||||
DEVICE_CLASS_PRESSURE,
|
||||
@@ -18,6 +19,17 @@ CODEOWNERS = ["@gcormier"]
|
||||
CONF_K_VALUE = "k_value"
|
||||
|
||||
xgzp68xx_ns = cg.esphome_ns.namespace("xgzp68xx")
|
||||
XGZP68XXOversampling = xgzp68xx_ns.enum("XGZP68XXOversampling")
|
||||
OVERSAMPLING_OPTIONS = {
|
||||
"256X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_256X,
|
||||
"512X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_512X,
|
||||
"1024X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_1024X,
|
||||
"2048X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_2048X,
|
||||
"4096X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_4096X,
|
||||
"8192X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_8192X,
|
||||
"16384X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_16384X,
|
||||
"32768X": XGZP68XXOversampling.XGZP68XX_OVERSAMPLING_32768X,
|
||||
}
|
||||
XGZP68XXComponent = xgzp68xx_ns.class_(
|
||||
"XGZP68XXComponent", cg.PollingComponent, i2c.I2CDevice
|
||||
)
|
||||
@@ -31,6 +43,12 @@ CONFIG_SCHEMA = (
|
||||
accuracy_decimals=1,
|
||||
device_class=DEVICE_CLASS_PRESSURE,
|
||||
state_class=STATE_CLASS_MEASUREMENT,
|
||||
).extend(
|
||||
{
|
||||
cv.Optional(CONF_OVERSAMPLING, default="4096X"): cv.enum(
|
||||
OVERSAMPLING_OPTIONS, upper=True
|
||||
),
|
||||
}
|
||||
),
|
||||
cv.Optional(CONF_TEMPERATURE): sensor.sensor_schema(
|
||||
unit_of_measurement=UNIT_CELSIUS,
|
||||
@@ -58,5 +76,6 @@ async def to_code(config):
|
||||
if pressure_config := config.get(CONF_PRESSURE):
|
||||
sens = await sensor.new_sensor(pressure_config)
|
||||
cg.add(var.set_pressure_sensor(sens))
|
||||
cg.add(var.set_pressure_oversampling(pressure_config[CONF_OVERSAMPLING]))
|
||||
|
||||
cg.add(var.set_k_value(config[CONF_K_VALUE]))
|
||||
|
||||
@@ -16,16 +16,49 @@ static const uint8_t SYSCONFIG_ADDRESS = 0xA5;
|
||||
static const uint8_t PCONFIG_ADDRESS = 0xA6;
|
||||
static const uint8_t READ_COMMAND = 0x0A;
|
||||
|
||||
[[maybe_unused]] static const char *oversampling_to_str(XGZP68XXOversampling oversampling) {
|
||||
switch (oversampling) {
|
||||
case XGZP68XX_OVERSAMPLING_256X:
|
||||
return "256x";
|
||||
case XGZP68XX_OVERSAMPLING_512X:
|
||||
return "512x";
|
||||
case XGZP68XX_OVERSAMPLING_1024X:
|
||||
return "1024x";
|
||||
case XGZP68XX_OVERSAMPLING_2048X:
|
||||
return "2048x";
|
||||
case XGZP68XX_OVERSAMPLING_4096X:
|
||||
return "4096x";
|
||||
case XGZP68XX_OVERSAMPLING_8192X:
|
||||
return "8192x";
|
||||
case XGZP68XX_OVERSAMPLING_16384X:
|
||||
return "16384x";
|
||||
case XGZP68XX_OVERSAMPLING_32768X:
|
||||
return "32768x";
|
||||
default:
|
||||
return "UNKNOWN";
|
||||
}
|
||||
}
|
||||
|
||||
void XGZP68XXComponent::update() {
|
||||
// Do we need to change oversampling?
|
||||
if (this->last_pressure_oversampling_ != this->pressure_oversampling_) {
|
||||
uint8_t oldconfig = 0;
|
||||
this->read_register(PCONFIG_ADDRESS, &oldconfig, 1);
|
||||
uint8_t newconfig = (oldconfig & 0xf8) | (this->pressure_oversampling_ & 0x7);
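    // Only bits [2:0] of PCONFIG carry the pressure OSR (see XGZP68XXOversampling);
    // the 0xf8 mask keeps the register's upper five bits untouched.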
    this->write_register(PCONFIG_ADDRESS, &newconfig, 1);
    ESP_LOGD(TAG, "oversampling to %s: oldconfig = 0x%x newconfig = 0x%x",
             oversampling_to_str(this->pressure_oversampling_), oldconfig, newconfig);
    this->last_pressure_oversampling_ = this->pressure_oversampling_;
  }

  // Request temp + pressure acquisition
  this->write_register(0x30, &READ_COMMAND, 1);

  // Wait 20 ms per datasheet
  this->set_timeout("measurement", 20, [this]() {
    uint8_t data[5];
    uint32_t pressure_raw;
    uint16_t temperature_raw;
    float pressure_in_pa, temperature;
    uint8_t data[5] = {};
    uint32_t pressure_raw = 0;
    uint16_t temperature_raw = 0;
    int success;

    // Read the sensor data
@@ -42,23 +75,11 @@ void XGZP68XXComponent::update() {
    ESP_LOGV(TAG, "Got raw pressure=%" PRIu32 ", raw temperature=%u", pressure_raw, temperature_raw);
    ESP_LOGV(TAG, "K value is %u", this->k_value_);

    // The most significant bit of both pressure and temperature will be 1 to indicate a negative value.
    // This is directly from the datasheet, and the calculations below will handle this.
    if (pressure_raw > pow(2, 23)) {
      // Negative pressure
      pressure_in_pa = (pressure_raw - pow(2, 24)) / (float) (this->k_value_);
    } else {
      // Positive pressure
      pressure_in_pa = pressure_raw / (float) (this->k_value_);
    }
    // Sign extend the pressure
    float pressure_in_pa = (float) (((int32_t) pressure_raw << 8) >> 8);
    pressure_in_pa /= (float) (this->k_value_);
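    // Worked example (hypothetical reading): pressure_raw = 0xFFFF38 has bit 23
    // set, so the left shift places it in the int32 sign bit and the arithmetic
    // right shift yields -200; with k_value_ = 64 that gives -3.125 Pa.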

    if (temperature_raw > pow(2, 15)) {
      // Negative temperature
      temperature = (float) (temperature_raw - pow(2, 16)) / 256.0f;
    } else {
      // Positive temperature
      temperature = (float) temperature_raw / 256.0f;
    }
    float temperature = ((float) (int16_t) temperature_raw) / 256.0f;
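    // Likewise a hypothetical temperature_raw of 0xFF00 reinterpreted as int16_t
    // is -256, i.e. -1.0 degC after dividing by 256.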

    if (this->pressure_sensor_ != nullptr)
      this->pressure_sensor_->publish_state(pressure_in_pa);
@@ -69,20 +90,27 @@ void XGZP68XXComponent::update() {
}

void XGZP68XXComponent::setup() {
  uint8_t config;
  uint8_t config1 = 0, config2 = 0;

  // Display some sample bits to confirm we are talking to the sensor
  this->read_register(SYSCONFIG_ADDRESS, &config, 1);
  ESP_LOGCONFIG(TAG,
                "Gain value is %d\n"
                "XGZP68xx started!",
                (config >> 3) & 0b111);
  if (i2c::ErrorCode::ERROR_OK != this->read_register(SYSCONFIG_ADDRESS, &config1, 1)) {
    this->mark_failed();
    return;
  }
  if (i2c::ErrorCode::ERROR_OK != this->read_register(PCONFIG_ADDRESS, &config2, 1)) {
    this->mark_failed();
    return;
  }
  ESP_LOGD(TAG, "sys_config 0x%x, p_config 0x%x", config1, config2);
}

void XGZP68XXComponent::dump_config() {
  ESP_LOGCONFIG(TAG, "XGZP68xx:");
  LOG_SENSOR("  ", "Temperature: ", this->temperature_sensor_);
  LOG_SENSOR("  ", "Pressure: ", this->pressure_sensor_);
  if (this->pressure_sensor_ != nullptr) {
    ESP_LOGCONFIG(TAG, "  Oversampling: %s", oversampling_to_str(this->pressure_oversampling_));
  }
  LOG_I2C_DEVICE(this);
  if (this->is_failed()) {
    ESP_LOGE(TAG, "  Connection failed");

@@ -7,11 +7,29 @@
namespace esphome {
namespace xgzp68xx {

/// Enum listing all oversampling options for the XGZP68XX.
enum XGZP68XXOversampling : uint8_t {
  XGZP68XX_OVERSAMPLING_256X = 0b100,
  XGZP68XX_OVERSAMPLING_512X = 0b101,
  XGZP68XX_OVERSAMPLING_1024X = 0b000,
  XGZP68XX_OVERSAMPLING_2048X = 0b001,
  XGZP68XX_OVERSAMPLING_4096X = 0b010,
  XGZP68XX_OVERSAMPLING_8192X = 0b011,
  XGZP68XX_OVERSAMPLING_16384X = 0b110,
  XGZP68XX_OVERSAMPLING_32768X = 0b111,

  XGZP68XX_OVERSAMPLING_UNKNOWN = (uint8_t) -1,
};

class XGZP68XXComponent : public PollingComponent, public sensor::Sensor, public i2c::I2CDevice {
 public:
  SUB_SENSOR(temperature)
  SUB_SENSOR(pressure)
  void set_k_value(uint16_t k_value) { this->k_value_ = k_value; }
  /// Set the pressure oversampling value. Defaults to 4096X.
  void set_pressure_oversampling(XGZP68XXOversampling pressure_oversampling) {
    this->pressure_oversampling_ = pressure_oversampling;
  }

  void update() override;
  void setup() override;
@@ -21,6 +39,8 @@ class XGZP68XXComponent : public PollingComponent, public sensor::Sensor, public
  /// Internal method to read the pressure from the component after it has been scheduled.
  void read_pressure_();
  uint16_t k_value_;
  XGZP68XXOversampling pressure_oversampling_{XGZP68XX_OVERSAMPLING_4096X};
  XGZP68XXOversampling last_pressure_oversampling_{XGZP68XX_OVERSAMPLING_UNKNOWN};
};

}  // namespace xgzp68xx
@@ -222,18 +222,25 @@ def copy_files():
    ] in ["xiao_ble"]:
        fake_board_manifest = """
{
  "frameworks": [
    "zephyr"
  ],
  "name": "esphome nrf52",
  "upload": {
    "maximum_ram_size": 248832,
    "maximum_size": 815104
  },
  "url": "https://esphome.io/",
  "vendor": "esphome"
  "frameworks": [
    "zephyr"
  ],
  "name": "esphome nrf52",
  "upload": {
    "maximum_ram_size": 248832,
    "maximum_size": 815104,
    "speed": 115200
  },
  "url": "https://esphome.io/",
  "vendor": "esphome",
  "build": {
    "softdevice": {
      "sd_fwid": "0x00B6"
    }
  }
}
"""

        write_file_if_changed(
            CORE.relative_build_path(f"boards/{zephyr_data()[KEY_BOARD]}.json"),
            fake_board_manifest,
@@ -12,7 +12,7 @@ from typing import Any
|
||||
import voluptuous as vol
|
||||
|
||||
from esphome import core, loader, pins, yaml_util
|
||||
from esphome.config_helpers import Extend, Remove
|
||||
from esphome.config_helpers import Extend, Remove, merge_dicts_ordered
|
||||
import esphome.config_validation as cv
|
||||
from esphome.const import (
|
||||
CONF_ESPHOME,
|
||||
@@ -922,10 +922,9 @@ def validate_config(
|
||||
if CONF_SUBSTITUTIONS in config or command_line_substitutions:
|
||||
from esphome.components import substitutions
|
||||
|
||||
result[CONF_SUBSTITUTIONS] = {
|
||||
**(config.get(CONF_SUBSTITUTIONS) or {}),
|
||||
**command_line_substitutions,
|
||||
}
|
||||
result[CONF_SUBSTITUTIONS] = merge_dicts_ordered(
|
||||
config.get(CONF_SUBSTITUTIONS) or {}, command_line_substitutions
|
||||
)
|
||||
result.add_output_path([CONF_SUBSTITUTIONS], CONF_SUBSTITUTIONS)
|
||||
try:
|
||||
substitutions.do_substitution_pass(config, command_line_substitutions)
|
||||
|
||||
@@ -10,6 +10,7 @@ from esphome.const import (
    PlatformFramework,
)
from esphome.core import CORE
from esphome.util import OrderedDict

# Pre-build lookup map from (platform, framework) tuples to PlatformFramework enum
_PLATFORM_FRAMEWORK_LOOKUP = {
@@ -17,6 +18,25 @@ _PLATFORM_FRAMEWORK_LOOKUP = {
}


def merge_dicts_ordered(*dicts: dict) -> OrderedDict:
    """Merge multiple dicts into an OrderedDict, preserving key order.

    This is a helper to ensure that dictionary merging preserves OrderedDict type,
    which is important for operations like move_to_end().

    Args:
        *dicts: Variable number of dictionaries to merge (later dicts override earlier ones)

    Returns:
        OrderedDict with merged contents
    """
    result = OrderedDict()
    for d in dicts:
        if d:
            result.update(d)
    return result
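    # Example: later dicts win, while first-seen key order is preserved:
    # merge_dicts_ordered({"a": 1, "b": 2}, {"b": 3, "c": 4})
    # -> OrderedDict([("a", 1), ("b", 3), ("c", 4)])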


class Extend:
    def __init__(self, value):
        self.value = value
@@ -60,7 +80,11 @@ def merge_config(full_old, full_new):
        if isinstance(new, dict):
            if not isinstance(old, dict):
                return new
            res = old.copy()
            # Preserve OrderedDict type by copying to OrderedDict if either input is OrderedDict
            if isinstance(old, OrderedDict) or isinstance(new, OrderedDict):
                res = OrderedDict(old)
            else:
                res = old.copy()
            for k, v in new.items():
                if isinstance(v, Remove) and k in old:
                    del res[k]

@@ -244,6 +244,20 @@ RESERVED_IDS = [
    "uart0",
    "uart1",
    "uart2",
    # ESP32 ROM functions
    "crc16_be",
    "crc16_le",
    "crc32_be",
    "crc32_le",
    "crc8_be",
    "crc8_le",
    "dbg_state",
    "debug_timer",
    "one_bits",
    "recv_packet",
    "send_packet",
    "check_pos",
    "software_reset",
]

@@ -696,6 +696,7 @@ CONF_OPEN_DRAIN = "open_drain"
CONF_OPEN_DRAIN_INTERRUPT = "open_drain_interrupt"
CONF_OPEN_DURATION = "open_duration"
CONF_OPEN_ENDSTOP = "open_endstop"
CONF_OPENTHREAD = "openthread"
CONF_OPERATION = "operation"
CONF_OPTIMISTIC = "optimistic"
CONF_OPTION = "option"
@@ -1299,6 +1300,7 @@ DEVICE_CLASS_SULPHUR_DIOXIDE = "sulphur_dioxide"
DEVICE_CLASS_SWITCH = "switch"
DEVICE_CLASS_TAMPER = "tamper"
DEVICE_CLASS_TEMPERATURE = "temperature"
DEVICE_CLASS_TEMPERATURE_DELTA = "temperature_delta"
DEVICE_CLASS_TIMESTAMP = "timestamp"
DEVICE_CLASS_UPDATE = "update"
DEVICE_CLASS_VIBRATION = "vibration"

@@ -10,6 +10,10 @@ from esphome.helpers import get_bool_env

from .util.password import password_hash

# Sentinel file name used for CORE.config_path when dashboard initializes.
# This ensures .parent returns the config directory instead of root.
_DASHBOARD_SENTINEL_FILE = "___DASHBOARD_SENTINEL___.yaml"


class DashboardSettings:
    """Settings for the dashboard."""
@@ -48,7 +52,12 @@ class DashboardSettings:
        self.config_dir = Path(args.configuration)
        self.absolute_config_dir = self.config_dir.resolve()
        self.verbose = args.verbose
        CORE.config_path = self.config_dir / "."
        # Set to a sentinel file so .parent gives us the config directory.
        # Previously this was `os.path.join(self.config_dir, ".")` which worked because
        # os.path.dirname("/config/.") returns "/config", but Path("/config/.").parent
        # normalizes to Path("/config") first, then .parent returns Path("/"), breaking
        # secret resolution. Using a sentinel file ensures .parent gives the correct directory.
        CORE.config_path = self.config_dir / _DASHBOARD_SENTINEL_FILE
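        # With the sentinel, Path("/config/___DASHBOARD_SENTINEL___.yaml").parent
        # resolves to Path("/config") as required.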

    @property
    def relative_url(self) -> str:

@@ -1058,7 +1058,8 @@ class DownloadBinaryRequestHandler(BaseHandler):
            "download",
            f"{storage_json.name}-{file_name}",
        )
        path = storage_json.firmware_bin_path.with_name(file_name)

        path = storage_json.firmware_bin_path.parent.joinpath(file_name)

        if not path.is_file():
            args = ["esphome", "idedata", settings.rel_path(configuration)]

@@ -242,7 +242,7 @@ def send_check(


def perform_ota(
    sock: socket.socket, password: str, file_handle: io.IOBase, filename: Path
    sock: socket.socket, password: str | None, file_handle: io.IOBase, filename: Path
) -> None:
    file_contents = file_handle.read()
    file_size = len(file_contents)
@@ -278,13 +278,13 @@ def perform_ota(

def perform_auth(
    sock: socket.socket,
    password: str,
    password: str | None,
    hash_func: Callable[..., Any],
    nonce_size: int,
    hash_name: str,
) -> None:
    """Perform challenge-response authentication using specified hash algorithm."""
    if not password:
    if password is None:
        raise OTAError("ESP requests password, but no password given!")

    nonce_bytes = receive_exactly(
@@ -385,7 +385,7 @@ def perform_ota(


def run_ota_impl_(
    remote_host: str | list[str], remote_port: int, password: str, filename: Path
    remote_host: str | list[str], remote_port: int, password: str | None, filename: Path
) -> tuple[int, str | None]:
    from esphome.core import CORE

@@ -436,7 +436,7 @@ def run_ota_impl_(


def run_ota(
    remote_host: str | list[str], remote_port: int, password: str, filename: Path
    remote_host: str | list[str], remote_port: int, password: str | None, filename: Path
) -> tuple[int, str | None]:
    try:
        return run_ota_impl_(remote_host, remote_port, password, filename)
esphome/git.py
@@ -5,6 +5,7 @@ import hashlib
import logging
from pathlib import Path
import re
import shutil
import subprocess
import urllib.parse

@@ -17,14 +18,60 @@ _LOGGER = logging.getLogger(__name__)
NEVER_REFRESH = TimePeriodSeconds(seconds=-1)


def run_git_command(cmd, cwd=None) -> str:
    _LOGGER.debug("Running git command: %s", " ".join(cmd))
class GitException(cv.Invalid):
    """Base exception for git-related errors."""


class GitNotInstalledError(GitException):
    """Exception raised when git is not installed on the system."""


class GitCommandError(GitException):
    """Exception raised when a git command fails."""


class GitRepositoryError(GitException):
    """Exception raised when a git repository is in an invalid state."""


def run_git_command(cmd: list[str], git_dir: Path | None = None) -> str:
    if git_dir is not None:
        _LOGGER.debug(
            "Running git command with repository isolation: %s (git_dir=%s)",
            " ".join(cmd),
            git_dir,
        )
    else:
        _LOGGER.debug("Running git command: %s", " ".join(cmd))

    # Set up environment for repository isolation if git_dir is provided
    # Force git to only operate on this specific repository by setting
    # GIT_DIR and GIT_WORK_TREE. This prevents git from walking up the
    # directory tree to find parent repositories when the target repo's
    # .git directory is corrupt. Without this, commands like 'git stash'
    # could accidentally operate on parent repositories (e.g., the main
    # ESPHome repo) instead of failing, causing data loss.
    env: dict[str, str] | None = None
    cwd: str | None = None
    if git_dir is not None:
        env = {
            **subprocess.os.environ,
            "GIT_DIR": str(Path(git_dir) / ".git"),
            "GIT_WORK_TREE": str(git_dir),
        }
        cwd = str(git_dir)
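    # For a hypothetical git_dir of /data/external_components/abc123 this runs git
    # with GIT_DIR=/data/external_components/abc123/.git and
    # GIT_WORK_TREE=/data/external_components/abc123, so a corrupt repo can no
    # longer fall through to a parent repository.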

    try:
        ret = subprocess.run(
            cmd, cwd=cwd, capture_output=True, check=False, close_fds=False
            cmd,
            cwd=cwd,
            capture_output=True,
            check=False,
            close_fds=False,
            env=env,
        )
    except FileNotFoundError as err:
        raise cv.Invalid(
        raise GitNotInstalledError(
            "git is not installed but required for external_components.\n"
            "Please see https://git-scm.com/book/en/v2/Getting-Started-Installing-Git for installing git"
        ) from err
@@ -33,8 +80,8 @@ def run_git_command(cmd, cwd=None) -> str:
        err_str = ret.stderr.decode("utf-8")
        lines = [x.strip() for x in err_str.splitlines()]
        if lines[-1].startswith("fatal:"):
            raise cv.Invalid(lines[-1][len("fatal: ") :])
        raise cv.Invalid(err_str)
            raise GitCommandError(lines[-1][len("fatal: ") :])
        raise GitCommandError(err_str)

    return ret.stdout.decode("utf-8").strip()

@@ -55,6 +102,7 @@ def clone_or_update(
    username: str = None,
    password: str = None,
    submodules: list[str] | None = None,
    _recover_broken: bool = True,
) -> tuple[Path, Callable[[], None] | None]:
    key = f"{url}@{ref}"

@@ -75,15 +123,15 @@ def clone_or_update(
            # We need to fetch the PR branch first, otherwise git will complain
            # about missing objects
            _LOGGER.info("Fetching %s", ref)
            run_git_command(["git", "fetch", "--", "origin", ref], str(repo_dir))
            run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], str(repo_dir))
            run_git_command(["git", "fetch", "--", "origin", ref], git_dir=repo_dir)
            run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], git_dir=repo_dir)

        if submodules is not None:
            _LOGGER.info(
                "Initialising submodules (%s) for %s", ", ".join(submodules), key
                "Initializing submodules (%s) for %s", ", ".join(submodules), key
            )
            run_git_command(
                ["git", "submodule", "update", "--init"] + submodules, str(repo_dir)
                ["git", "submodule", "update", "--init"] + submodules, git_dir=repo_dir
            )

    else:
@@ -99,32 +147,82 @@ def clone_or_update(
        file_timestamp = Path(repo_dir / ".git" / "HEAD")
        age = datetime.now() - datetime.fromtimestamp(file_timestamp.stat().st_mtime)
        if refresh is None or age.total_seconds() > refresh.total_seconds:
            old_sha = run_git_command(["git", "rev-parse", "HEAD"], str(repo_dir))
            _LOGGER.info("Updating %s", key)
            _LOGGER.debug("Location: %s", repo_dir)
            # Stash local changes (if any)
            run_git_command(
                ["git", "stash", "push", "--include-untracked"], str(repo_dir)
            )
            # Fetch remote ref
            cmd = ["git", "fetch", "--", "origin"]
            if ref is not None:
                cmd.append(ref)
            run_git_command(cmd, str(repo_dir))
            # Hard reset to FETCH_HEAD (short-lived git ref corresponding to most recent fetch)
            run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], str(repo_dir))
            # Try to update the repository, recovering from broken state if needed
            old_sha: str | None = None
            try:
                # First verify the repository is valid by checking HEAD
                # Use git_dir parameter to prevent git from walking up to parent repos
                old_sha = run_git_command(
                    ["git", "rev-parse", "HEAD"], git_dir=repo_dir
                )

                _LOGGER.info("Updating %s", key)
                _LOGGER.debug("Location: %s", repo_dir)

                # Stash local changes (if any)
                # Use git_dir to ensure this only affects the specific repo
                run_git_command(
                    ["git", "stash", "push", "--include-untracked"],
                    git_dir=repo_dir,
                )

                # Fetch remote ref
                cmd = ["git", "fetch", "--", "origin"]
                if ref is not None:
                    cmd.append(ref)
                run_git_command(cmd, git_dir=repo_dir)

                # Hard reset to FETCH_HEAD (short-lived git ref corresponding to most recent fetch)
                run_git_command(
                    ["git", "reset", "--hard", "FETCH_HEAD"],
                    git_dir=repo_dir,
                )
            except GitException as err:
                # Repository is in a broken state or update failed
                # Only attempt recovery once to prevent infinite recursion
                if not _recover_broken:
                    _LOGGER.error(
                        "Repository %s recovery failed, cannot retry (already attempted once)",
                        key,
                    )
                    raise

                _LOGGER.warning(
                    "Repository %s has issues (%s), attempting recovery",
                    key,
                    err,
                )
                _LOGGER.info("Removing broken repository at %s", repo_dir)
                shutil.rmtree(repo_dir)
                _LOGGER.info("Successfully removed broken repository, re-cloning...")

                # Recursively call clone_or_update to re-clone
                # Set _recover_broken=False to prevent infinite recursion
                result = clone_or_update(
                    url=url,
                    ref=ref,
                    refresh=refresh,
                    domain=domain,
                    username=username,
                    password=password,
                    submodules=submodules,
                    _recover_broken=False,
                )
                _LOGGER.info("Repository %s successfully recovered", key)
                return result

        if submodules is not None:
            _LOGGER.info(
                "Updating submodules (%s) for %s", ", ".join(submodules), key
            )
            run_git_command(
                ["git", "submodule", "update", "--init"] + submodules, str(repo_dir)
                ["git", "submodule", "update", "--init"] + submodules,
                git_dir=repo_dir,
            )

    def revert():
        _LOGGER.info("Reverting changes to %s -> %s", key, old_sha)
        run_git_command(["git", "reset", "--hard", old_sha], str(repo_dir))
        run_git_command(["git", "reset", "--hard", old_sha], git_dir=repo_dir)

    return repo_dir, revert

@@ -374,3 +374,23 @@ class IDEData:
            return f"{self.cc_path[:-7]}addr2line.exe"

        return f"{self.cc_path[:-3]}addr2line"

    @property
    def objdump_path(self) -> str:
        # replace gcc at end with objdump
        path = self.cc_path
        return (
            f"{path[:-7]}objdump.exe"
            if path.endswith(".exe")
            else f"{path[:-3]}objdump"
        )

    @property
    def readelf_path(self) -> str:
        # replace gcc at end with readelf
        path = self.cc_path
        return (
            f"{path[:-7]}readelf.exe"
            if path.endswith(".exe")
            else f"{path[:-3]}readelf"
        )

@@ -12,7 +12,7 @@ platformio==6.1.18 # When updating platformio, also update /docker/Dockerfile
esptool==5.1.0
click==8.1.7
esphome-dashboard==20251013.0
aioesphomeapi==42.0.0
aioesphomeapi==42.2.0
zeroconf==0.148.0
puremagic==1.30
ruamel.yaml==0.18.15 # dashboard_import

@@ -1,6 +1,6 @@
pylint==4.0.1
flake8==7.3.0 # also change in .pre-commit-config.yaml when updating
ruff==0.14.0 # also change in .pre-commit-config.yaml when updating
ruff==0.14.1 # also change in .pre-commit-config.yaml when updating
pyupgrade==3.21.0 # also change in .pre-commit-config.yaml when updating
pre-commit

@@ -34,6 +34,8 @@ from typing import Any
# Add esphome to path
sys.path.insert(0, str(Path(__file__).parent.parent))

from helpers import BASE_BUS_COMPONENTS

from esphome import yaml_util
from esphome.config_helpers import Extend, Remove

@@ -50,21 +52,22 @@ PACKAGE_DEPENDENCIES = {

# Bus types that can be defined directly in config files
# Components defining these directly cannot be grouped (they create unique bus IDs)
DIRECT_BUS_TYPES = ("i2c", "spi", "uart", "modbus")
DIRECT_BUS_TYPES = (
    "i2c",
    "spi",
    "uart",
    "modbus",
    "remote_transmitter",
    "remote_receiver",
)

# Signature for components with no bus requirements
# These components can be merged with any other group
NO_BUSES_SIGNATURE = "no_buses"

# Base bus components - these ARE the bus implementations and should not
# be flagged as needing migration since they are the platform/base components
BASE_BUS_COMPONENTS = {
    "i2c",
    "spi",
    "uart",
    "modbus",
    "canbus",
}
# Prefix for isolated component signatures
# Isolated components have unique signatures and cannot be merged with others
ISOLATED_SIGNATURE_PREFIX = "isolated_"

# Components that must be tested in isolation (not grouped or batched with others)
# These have known build issues that prevent grouping
@@ -75,11 +78,10 @@ ISOLATED_COMPONENTS = {
    "ethernet": "Defines ethernet: which conflicts with wifi: used by most components",
    "ethernet_info": "Related to ethernet component which conflicts with wifi",
    "lvgl": "Defines multiple SDL displays on host platform that conflict when merged with other display configs",
    "mapping": "Uses dict format for image/display sections incompatible with standard list format - ESPHome merge_config cannot handle",
    "openthread": "Conflicts with wifi: used by most components",
    "openthread_info": "Conflicts with wifi: used by most components",
    "matrix_keypad": "Needs isolation due to keypad",
    "mcp4725": "no YAML config to specify i2c bus id",
    "mcp47a1": "no YAML config to specify i2c bus id",
    "modbus_controller": "Defines multiple modbus buses for testing client/server functionality - conflicts with package modbus bus",
    "neopixelbus": "RMT type conflict with ESP32 Arduino/ESP-IDF headers (enum vs struct rmt_channel_t)",
    "packages": "cannot merge packages",
@@ -368,6 +370,143 @@ def analyze_all_components(
    return components, non_groupable, direct_bus_components


@lru_cache(maxsize=256)
def _get_bus_configs(buses: tuple[str, ...]) -> frozenset[tuple[str, str]]:
    """Map bus type to set of configs for that type.

    Args:
        buses: Tuple of bus package names (e.g., ("uart_9600", "i2c"))

    Returns:
        Frozenset of (base_type, full_config) tuples
        Example: frozenset({("uart", "uart_9600"), ("i2c", "i2c")})
    """
    # Split on underscore to get base type: "uart_9600" -> "uart", "i2c" -> "i2c"
    return frozenset((bus.split("_", 1)[0], bus) for bus in buses)
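    # e.g. _get_bus_configs(("uart_9600", "i2c"))
    # -> frozenset({("uart", "uart_9600"), ("i2c", "i2c")})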


@lru_cache(maxsize=1024)
def are_buses_compatible(buses1: tuple[str, ...], buses2: tuple[str, ...]) -> bool:
    """Check if two bus tuples are compatible for merging.

    Two bus lists are compatible if they don't have conflicting configurations
    for the same bus type. For example:
    - ("ble", "uart") and ("i2c",) are compatible (different buses)
    - ("uart_9600",) and ("uart_19200",) are NOT compatible (same bus, different configs)
    - ("uart_9600",) and ("uart_9600",) are compatible (same bus, same config)

    Args:
        buses1: First tuple of bus package names
        buses2: Second tuple of bus package names

    Returns:
        True if buses can be merged without conflicts
    """
    configs1 = _get_bus_configs(buses1)
    configs2 = _get_bus_configs(buses2)

    # Group configs by base type
    bus_types1: dict[str, set[str]] = {}
    for base_type, full_config in configs1:
        if base_type not in bus_types1:
            bus_types1[base_type] = set()
        bus_types1[base_type].add(full_config)

    bus_types2: dict[str, set[str]] = {}
    for base_type, full_config in configs2:
        if base_type not in bus_types2:
            bus_types2[base_type] = set()
        bus_types2[base_type].add(full_config)

    # Check for conflicts: same bus type with different configs
    for bus_type, configs in bus_types1.items():
        if bus_type not in bus_types2:
            continue  # No conflict - different bus types
        # Same bus type - check if configs match
        if configs != bus_types2[bus_type]:
            return False  # Conflict - same bus type, different configs

    return True  # No conflicts found


def merge_compatible_bus_groups(
    grouped_components: dict[tuple[str, str], list[str]],
) -> dict[tuple[str, str], list[str]]:
    """Merge groups with compatible (non-conflicting) buses.

    This function takes groups keyed by (platform, bus_signature) and merges
    groups that share the same platform and have compatible bus configurations.
    Two groups can be merged if their buses don't conflict - meaning they don't
    have different configurations for the same bus type.

    For example:
    - ["ble"] + ["uart"] = compatible (different buses)
    - ["uart_9600"] + ["uart_19200"] = incompatible (same bus, different configs)
    - ["uart_9600"] + ["uart_9600"] = compatible (same bus, same config)

    Args:
        grouped_components: Dictionary mapping (platform, signature) to list of component names

    Returns:
        Dictionary with same structure but with compatible groups merged
    """
    merged_groups: dict[tuple[str, str], list[str]] = {}
    processed_keys: set[tuple[str, str]] = set()

    for (platform1, sig1), comps1 in sorted(grouped_components.items()):
        if (platform1, sig1) in processed_keys:
            continue

        # Skip NO_BUSES_SIGNATURE - kept separate for flexible batch distribution
        # These components have no bus requirements and can be added to any batch
        # as "fillers" for load balancing across CI runners
        if sig1 == NO_BUSES_SIGNATURE:
            merged_groups[(platform1, sig1)] = comps1
            processed_keys.add((platform1, sig1))
            continue

        # Skip isolated components - they can't be merged with others
        if sig1.startswith(ISOLATED_SIGNATURE_PREFIX):
            merged_groups[(platform1, sig1)] = comps1
            processed_keys.add((platform1, sig1))
            continue

        # Start with this group's components
        merged_comps: list[str] = list(comps1)
        merged_sig: str = sig1
        processed_keys.add((platform1, sig1))

        # Get buses for this group as tuple for caching
        buses1: tuple[str, ...] = tuple(sorted(sig1.split("+")))

        # Try to merge with other groups on same platform
        for (platform2, sig2), comps2 in sorted(grouped_components.items()):
            if (platform2, sig2) in processed_keys:
                continue
            if platform2 != platform1:
                continue  # Different platforms can't be merged
            if sig2 == NO_BUSES_SIGNATURE:
                continue  # Keep separate for flexible batch distribution
            if sig2.startswith(ISOLATED_SIGNATURE_PREFIX):
                continue  # Isolated components can't be merged

            # Check if buses are compatible
            buses2: tuple[str, ...] = tuple(sorted(sig2.split("+")))
            if are_buses_compatible(buses1, buses2):
                # Compatible! Merge this group
                merged_comps.extend(comps2)
                processed_keys.add((platform2, sig2))
                # Update merged signature to include all unique buses
                all_buses: set[str] = set(buses1) | set(buses2)
                merged_sig = "+".join(sorted(all_buses))
                buses1 = tuple(sorted(all_buses))  # Update for next iteration

        # Store merged group
        merged_groups[(platform1, merged_sig)] = merged_comps

    return merged_groups


def create_grouping_signature(
    platform_buses: dict[str, list[str]], platform: str
) -> str:

@@ -11,6 +11,7 @@ from typing import Any

import aioesphomeapi.api_options_pb2 as pb
import google.protobuf.descriptor_pb2 as descriptor
from google.protobuf.descriptor_pb2 import FieldDescriptorProto


class WireType(IntEnum):
@@ -148,7 +149,7 @@ class TypeInfo(ABC):
    @property
    def repeated(self) -> bool:
        """Check if the field is repeated."""
        return self._field.label == 3
        return self._field.label == FieldDescriptorProto.LABEL_REPEATED

    @property
    def wire_type(self) -> WireType:
@@ -337,7 +338,7 @@ def create_field_type_info(
    needs_encode: bool = True,
) -> TypeInfo:
    """Create the appropriate TypeInfo instance for a field, handling repeated fields and custom options."""
    if field.label == 3:  # repeated
    if field.label == FieldDescriptorProto.LABEL_REPEATED:
        # Check if this repeated field has fixed_array_with_length_define option
        if (
            fixed_size := get_field_opt(field, pb.fixed_array_with_length_define)
@@ -1879,6 +1880,9 @@ def build_message_type(
            )
        public_content.append("#endif")

    # Collect fixed_vector fields for custom decode generation
    fixed_vector_fields = []

    for field in desc.field:
        # Skip deprecated fields completely
        if field.options.deprecated:
@@ -1887,7 +1891,7 @@ def build_message_type(
        # Validate that fixed_array_size is only used in encode-only messages
        if (
            needs_decode
            and field.label == 3
            and field.label == FieldDescriptorProto.LABEL_REPEATED
            and get_field_opt(field, pb.fixed_array_size) is not None
        ):
            raise ValueError(
@@ -1900,7 +1904,7 @@ def build_message_type(
        # Validate that fixed_array_with_length_define is only used in encode-only messages
        if (
            needs_decode
            and field.label == 3
            and field.label == FieldDescriptorProto.LABEL_REPEATED
            and get_field_opt(field, pb.fixed_array_with_length_define) is not None
        ):
            raise ValueError(
@@ -1910,6 +1914,14 @@ def build_message_type(
                f"since we cannot trust or control the number of items received from clients."
            )

        # Collect fixed_vector repeated fields for custom decode generation
        if (
            needs_decode
            and field.label == FieldDescriptorProto.LABEL_REPEATED
            and get_field_opt(field, pb.fixed_vector, False)
        ):
            fixed_vector_fields.append((field.name, field.number))

        ti = create_field_type_info(field, needs_decode, needs_encode)

        # Skip field declarations for fields that are in the base class
@@ -2018,6 +2030,22 @@ def build_message_type(
        prot = "bool decode_64bit(uint32_t field_id, Proto64Bit value) override;"
        protected_content.insert(0, prot)

    # Generate custom decode() override for messages with FixedVector fields
    if fixed_vector_fields:
        # Generate the decode() implementation in cpp
        o = f"void {desc.name}::decode(const uint8_t *buffer, size_t length) {{\n"
        # Count and init each FixedVector field
        for field_name, field_number in fixed_vector_fields:
            o += f"  uint32_t count_{field_name} = ProtoDecodableMessage::count_repeated_field(buffer, length, {field_number});\n"
            o += f"  this->{field_name}.init(count_{field_name});\n"
        # Call parent decode to populate the fields
        o += "  ProtoDecodableMessage::decode(buffer, length);\n"
        o += "}\n"
        cpp += o
        # Generate the decode() declaration in header (public method)
        prot = "void decode(const uint8_t *buffer, size_t length) override;"
        public_content.append(prot)
|
||||
|
||||
# Only generate encode method if this message needs encoding and has fields
|
||||
if needs_encode and encode:
|
||||
o = f"void {desc.name}::encode(ProtoWriteBuffer buffer) const {{"
|
||||
|
||||
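
To picture the output of the decode() generation above, this standalone rerun of the same string assembly on an invented message (the name and field number are made up) prints the C++ that would be emitted:

# Invented message/field purely to show the shape of the generated C++.
desc_name = "DemoListResponse"
fixed_vector_fields = [("entries", 1)]

o = f"void {desc_name}::decode(const uint8_t *buffer, size_t length) {{\n"
for field_name, field_number in fixed_vector_fields:
    o += f"  uint32_t count_{field_name} = ProtoDecodableMessage::count_repeated_field(buffer, length, {field_number});\n"
    o += f"  this->{field_name}.init(count_{field_name});\n"
o += "  ProtoDecodableMessage::decode(buffer, length);\n"
o += "}\n"
print(o)
# void DemoListResponse::decode(const uint8_t *buffer, size_t length) {
#   uint32_t count_entries = ProtoDecodableMessage::count_repeated_field(buffer, length, 1);
#   this->entries.init(count_entries);
#   ProtoDecodableMessage::decode(buffer, length);
# }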
script/ci_helpers.py (new executable file, 23 lines)
@@ -0,0 +1,23 @@
"""Common helper functions for CI scripts."""

from __future__ import annotations

import os


def write_github_output(outputs: dict[str, str | int]) -> None:
    """Write multiple outputs to GITHUB_OUTPUT or stdout.

    When running in GitHub Actions, writes to the GITHUB_OUTPUT file.
    When running locally, writes to stdout for debugging.

    Args:
        outputs: Dictionary of key-value pairs to write
    """
    github_output = os.environ.get("GITHUB_OUTPUT")
    if github_output:
        with open(github_output, "a", encoding="utf-8") as f:
            f.writelines(f"{key}={value}\n" for key, value in outputs.items())
    else:
        for key, value in outputs.items():
            print(f"{key}={value}")
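
A quick usage sketch; run locally (where GITHUB_OUTPUT is unset) the pairs simply go to stdout:

# Hypothetical local invocation; assumes the repo root is on sys.path.
from script.ci_helpers import write_github_output

write_github_output({"ram_usage": 29548, "flash_usage": 348511})
# Without GITHUB_OUTPUT set, this prints:
# ram_usage=29548
# flash_usage=348511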
script/ci_memory_impact_comment.py (new executable file, 570 lines)
@@ -0,0 +1,570 @@
#!/usr/bin/env python3
"""Post or update a PR comment with memory impact analysis results.

This script creates or updates a GitHub PR comment with memory usage changes.
It uses the GitHub CLI (gh) to manage comments and maintains a single comment
that gets updated on subsequent runs.
"""

from __future__ import annotations

import argparse
import json
from pathlib import Path
import subprocess
import sys

from jinja2 import Environment, FileSystemLoader

# Add esphome to path for analyze_memory import
sys.path.insert(0, str(Path(__file__).parent.parent))

# pylint: disable=wrong-import-position

# Comment marker to identify our memory impact comments
COMMENT_MARKER = "<!-- esphome-memory-impact-analysis -->"

# Thresholds for emoji significance indicators (percentage)
OVERALL_CHANGE_THRESHOLD = 1.0  # Overall RAM/Flash changes
COMPONENT_CHANGE_THRESHOLD = 3.0  # Component breakdown changes

# Display limits for tables
MAX_COMPONENT_BREAKDOWN_ROWS = 20  # Maximum components to show in breakdown table
MAX_CHANGED_SYMBOLS_ROWS = 30  # Maximum changed symbols to show
MAX_NEW_SYMBOLS_ROWS = 15  # Maximum new symbols to show
MAX_REMOVED_SYMBOLS_ROWS = 15  # Maximum removed symbols to show

# Symbol display formatting
SYMBOL_DISPLAY_MAX_LENGTH = 100  # Max length before using <details> tag
SYMBOL_DISPLAY_TRUNCATE_LENGTH = 97  # Length to truncate in summary

# Component change noise threshold
COMPONENT_CHANGE_NOISE_THRESHOLD = 2  # Ignore component changes ≤ this many bytes

# Template directory
TEMPLATE_DIR = Path(__file__).parent / "templates"


def load_analysis_json(json_path: str) -> dict | None:
    """Load memory analysis results from JSON file.

    Args:
        json_path: Path to analysis JSON file

    Returns:
        Dictionary with analysis results or None if file doesn't exist/can't be loaded
    """
    json_file = Path(json_path)
    if not json_file.exists():
        print(f"Analysis JSON not found: {json_path}", file=sys.stderr)
        return None

    try:
        with open(json_file, encoding="utf-8") as f:
            return json.load(f)
    except (json.JSONDecodeError, OSError) as e:
        print(f"Failed to load analysis JSON: {e}", file=sys.stderr)
        return None


def format_bytes(bytes_value: int) -> str:
    """Format bytes value with comma separators.

    Args:
        bytes_value: Number of bytes

    Returns:
        Formatted string with comma separators (e.g., "1,234 bytes")
    """
    return f"{bytes_value:,} bytes"


def format_change(before: int, after: int, threshold: float | None = None) -> str:
    """Format memory change with delta and percentage.

    Args:
        before: Memory usage before change (in bytes)
        after: Memory usage after change (in bytes)
        threshold: Optional percentage threshold for "significant" change.
            If provided, adds supplemental emoji (🎉/🚨/🔸/✅) to chart icons.
            If None, only shows chart icons (📈/📉/➡️).

    Returns:
        Formatted string with delta and percentage
    """
    delta = after - before
    percentage = 0.0 if before == 0 else (delta / before) * 100

    # Always use chart icons to show direction
    if delta > 0:
        delta_str = f"+{delta:,} bytes"
        trend_icon = "📈"
        # Add supplemental emoji based on threshold if provided
        if threshold is not None:
            significance = "🚨" if abs(percentage) > threshold else "🔸"
            emoji = f"{trend_icon} {significance}"
        else:
            emoji = trend_icon
    elif delta < 0:
        delta_str = f"{delta:,} bytes"
        trend_icon = "📉"
        # Add supplemental emoji based on threshold if provided
        if threshold is not None:
            significance = "🎉" if abs(percentage) > threshold else "✅"
            emoji = f"{trend_icon} {significance}"
        else:
            emoji = trend_icon
    else:
        delta_str = "+0 bytes"
        emoji = "➡️"

    # Format percentage with sign
    if percentage > 0:
        pct_str = f"+{percentage:.2f}%"
    elif percentage < 0:
        pct_str = f"{percentage:.2f}%"
    else:
        pct_str = "0.00%"

    return f"{emoji} {delta_str} ({pct_str})"

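Worked examples for the helper above (byte counts invented; assumes format_change from this file is in scope):

print(format_change(100_000, 101_500, threshold=1.0))
# -> 📈 🚨 +1,500 bytes (+1.50%)    (increase above the 1.0% threshold)
print(format_change(100_000, 100_200, threshold=1.0))
# -> 📈 🔸 +200 bytes (+0.20%)      (increase below the threshold)
print(format_change(100_000, 99_000))
# -> 📉 -1,000 bytes (-1.00%)       (no threshold: trend icon only)
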
def prepare_symbol_changes_data(
    target_symbols: dict | None, pr_symbols: dict | None
) -> dict | None:
    """Prepare symbol changes data for template rendering.

    Args:
        target_symbols: Symbol name to size mapping for target branch
        pr_symbols: Symbol name to size mapping for PR branch

    Returns:
        Dictionary with changed, new, and removed symbols, or None if no changes
    """
    if not target_symbols or not pr_symbols:
        return None

    # Find all symbols that exist in both branches or only in one
    all_symbols = set(target_symbols.keys()) | set(pr_symbols.keys())

    # Track changes
    changed_symbols: list[
        tuple[str, int, int, int]
    ] = []  # (symbol, target_size, pr_size, delta)
    new_symbols: list[tuple[str, int]] = []  # (symbol, size)
    removed_symbols: list[tuple[str, int]] = []  # (symbol, size)

    for symbol in all_symbols:
        target_size = target_symbols.get(symbol, 0)
        pr_size = pr_symbols.get(symbol, 0)

        if target_size == 0 and pr_size > 0:
            # New symbol
            new_symbols.append((symbol, pr_size))
        elif target_size > 0 and pr_size == 0:
            # Removed symbol
            removed_symbols.append((symbol, target_size))
        elif target_size != pr_size:
            # Changed symbol
            delta = pr_size - target_size
            changed_symbols.append((symbol, target_size, pr_size, delta))

    if not changed_symbols and not new_symbols and not removed_symbols:
        return None

    # Sort by size/delta
    changed_symbols.sort(key=lambda x: abs(x[3]), reverse=True)
    new_symbols.sort(key=lambda x: x[1], reverse=True)
    removed_symbols.sort(key=lambda x: x[1], reverse=True)

    return {
        "changed_symbols": changed_symbols,
        "new_symbols": new_symbols,
        "removed_symbols": removed_symbols,
    }

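A toy call showing the three buckets (symbol names and sizes invented; assumes prepare_symbol_changes_data from this file is in scope):

target = {"loop()": 120, "setup()": 80, "old_helper()": 40}
pr = {"loop()": 150, "setup()": 80, "new_helper()": 64}
print(prepare_symbol_changes_data(target, pr))
# {'changed_symbols': [('loop()', 120, 150, 30)],
#  'new_symbols': [('new_helper()', 64)],
#  'removed_symbols': [('old_helper()', 40)]}
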
def prepare_component_breakdown_data(
    target_analysis: dict | None, pr_analysis: dict | None
) -> list[tuple[str, int, int, int]] | None:
    """Prepare component breakdown data for template rendering.

    Args:
        target_analysis: Component memory breakdown for target branch
        pr_analysis: Component memory breakdown for PR branch

    Returns:
        List of tuples (component, target_flash, pr_flash, delta), or None if no changes
    """
    if not target_analysis or not pr_analysis:
        return None

    # Combine all components from both analyses
    all_components = set(target_analysis.keys()) | set(pr_analysis.keys())

    # Filter to components that have changed (ignoring noise)
    changed_components: list[
        tuple[str, int, int, int]
    ] = []  # (comp, target_flash, pr_flash, delta)
    for comp in all_components:
        target_mem = target_analysis.get(comp, {})
        pr_mem = pr_analysis.get(comp, {})

        target_flash = target_mem.get("flash_total", 0)
        pr_flash = pr_mem.get("flash_total", 0)

        # Only include if component has meaningful change (above noise threshold)
        delta = pr_flash - target_flash
        if abs(delta) > COMPONENT_CHANGE_NOISE_THRESHOLD:
            changed_components.append((comp, target_flash, pr_flash, delta))

    if not changed_components:
        return None

    # Sort by absolute delta (largest changes first)
    changed_components.sort(key=lambda x: abs(x[3]), reverse=True)

    return changed_components

def create_comment_body(
    components: list[str],
    platform: str,
    target_ram: int,
    target_flash: int,
    pr_ram: int,
    pr_flash: int,
    target_analysis: dict | None = None,
    pr_analysis: dict | None = None,
    target_symbols: dict | None = None,
    pr_symbols: dict | None = None,
    target_cache_hit: bool = False,
) -> str:
    """Create the comment body with memory impact analysis using Jinja2 templates.

    Args:
        components: List of component names (merged config)
        platform: Platform name
        target_ram: RAM usage in target branch
        target_flash: Flash usage in target branch
        pr_ram: RAM usage in PR branch
        pr_flash: Flash usage in PR branch
        target_analysis: Optional component breakdown for target branch
        pr_analysis: Optional component breakdown for PR branch
        target_symbols: Optional symbol map for target branch
        pr_symbols: Optional symbol map for PR branch
        target_cache_hit: Whether target branch analysis was loaded from cache

    Returns:
        Formatted comment body
    """
    # Set up Jinja2 environment
    env = Environment(
        loader=FileSystemLoader(TEMPLATE_DIR),
        trim_blocks=True,
        lstrip_blocks=True,
    )

    # Register custom filters
    env.filters["format_bytes"] = format_bytes
    env.filters["format_change"] = format_change

    # Prepare template context
    context = {
        "comment_marker": COMMENT_MARKER,
        "platform": platform,
        "target_ram": format_bytes(target_ram),
        "pr_ram": format_bytes(pr_ram),
        "target_flash": format_bytes(target_flash),
        "pr_flash": format_bytes(pr_flash),
        "ram_change": format_change(
            target_ram, pr_ram, threshold=OVERALL_CHANGE_THRESHOLD
        ),
        "flash_change": format_change(
            target_flash, pr_flash, threshold=OVERALL_CHANGE_THRESHOLD
        ),
        "target_cache_hit": target_cache_hit,
        "component_change_threshold": COMPONENT_CHANGE_THRESHOLD,
    }

    # Format components list
    if len(components) == 1:
        context["components_str"] = f"`{components[0]}`"
        context["config_note"] = "a representative test configuration"
    else:
        context["components_str"] = ", ".join(f"`{c}`" for c in sorted(components))
        context["config_note"] = (
            f"a merged configuration with {len(components)} components"
        )

    # Prepare component breakdown if available
    component_breakdown = ""
    if target_analysis and pr_analysis:
        changed_components = prepare_component_breakdown_data(
            target_analysis, pr_analysis
        )
        if changed_components:
            template = env.get_template("ci_memory_impact_component_breakdown.j2")
            component_breakdown = template.render(
                changed_components=changed_components,
                format_bytes=format_bytes,
                format_change=format_change,
                component_change_threshold=COMPONENT_CHANGE_THRESHOLD,
                max_rows=MAX_COMPONENT_BREAKDOWN_ROWS,
            )

    # Prepare symbol changes if available
    symbol_changes = ""
    if target_symbols and pr_symbols:
        symbol_data = prepare_symbol_changes_data(target_symbols, pr_symbols)
        if symbol_data:
            template = env.get_template("ci_memory_impact_symbol_changes.j2")
            symbol_changes = template.render(
                **symbol_data,
                format_bytes=format_bytes,
                format_change=format_change,
                max_changed_rows=MAX_CHANGED_SYMBOLS_ROWS,
                max_new_rows=MAX_NEW_SYMBOLS_ROWS,
                max_removed_rows=MAX_REMOVED_SYMBOLS_ROWS,
                symbol_max_length=SYMBOL_DISPLAY_MAX_LENGTH,
                symbol_truncate_length=SYMBOL_DISPLAY_TRUNCATE_LENGTH,
            )

    if not target_analysis or not pr_analysis:
        print("No ELF files provided, skipping detailed analysis", file=sys.stderr)

    context["component_breakdown"] = component_breakdown
    context["symbol_changes"] = symbol_changes

    # Render main template
    template = env.get_template("ci_memory_impact_comment_template.j2")
    return template.render(**context)

def find_existing_comment(pr_number: str) -> str | None:
    """Find existing memory impact comment on the PR.

    Args:
        pr_number: PR number

    Returns:
        Comment numeric ID if found, None otherwise

    Raises:
        subprocess.CalledProcessError: If gh command fails
    """
    print(f"DEBUG: Looking for existing comment on PR #{pr_number}", file=sys.stderr)

    # Use gh api to get comments directly - this returns the numeric id field
    result = subprocess.run(
        [
            "gh",
            "api",
            f"/repos/{{owner}}/{{repo}}/issues/{pr_number}/comments",
            "--jq",
            ".[] | {id, body}",
        ],
        capture_output=True,
        text=True,
        check=True,
    )

    print(
        f"DEBUG: gh api comments output (first 500 chars):\n{result.stdout[:500]}",
        file=sys.stderr,
    )

    # Parse comments and look for our marker
    comment_count = 0
    for line in result.stdout.strip().split("\n"):
        if not line:
            continue

        try:
            comment = json.loads(line)
            comment_count += 1
            comment_id = comment.get("id")
            print(
                f"DEBUG: Checking comment {comment_count}: id={comment_id}",
                file=sys.stderr,
            )

            body = comment.get("body", "")
            if COMMENT_MARKER in body:
                print(
                    f"DEBUG: Found existing comment with id={comment_id}",
                    file=sys.stderr,
                )
                # Return the numeric id
                return str(comment_id)
            print("DEBUG: Comment does not contain marker", file=sys.stderr)
        except json.JSONDecodeError as e:
            print(f"DEBUG: JSON decode error: {e}", file=sys.stderr)
            continue

    print(
        f"DEBUG: No existing comment found (checked {comment_count} comments)",
        file=sys.stderr,
    )
    return None

def update_existing_comment(comment_id: str, comment_body: str) -> None:
    """Update an existing comment.

    Args:
        comment_id: Comment ID to update
        comment_body: New comment body text

    Raises:
        subprocess.CalledProcessError: If gh command fails
    """
    print(f"DEBUG: Updating existing comment {comment_id}", file=sys.stderr)
    result = subprocess.run(
        [
            "gh",
            "api",
            f"/repos/{{owner}}/{{repo}}/issues/comments/{comment_id}",
            "-X",
            "PATCH",
            "-f",
            f"body={comment_body}",
        ],
        check=True,
        capture_output=True,
        text=True,
    )
    print(f"DEBUG: Update response: {result.stdout}", file=sys.stderr)


def create_new_comment(pr_number: str, comment_body: str) -> None:
    """Create a new PR comment.

    Args:
        pr_number: PR number
        comment_body: Comment body text

    Raises:
        subprocess.CalledProcessError: If gh command fails
    """
    print(f"DEBUG: Posting new comment on PR #{pr_number}", file=sys.stderr)
    result = subprocess.run(
        ["gh", "pr", "comment", pr_number, "--body", comment_body],
        check=True,
        capture_output=True,
        text=True,
    )
    print(f"DEBUG: Post response: {result.stdout}", file=sys.stderr)


def post_or_update_comment(pr_number: str, comment_body: str) -> None:
    """Post a new comment or update existing one.

    Args:
        pr_number: PR number
        comment_body: Comment body text

    Raises:
        subprocess.CalledProcessError: If gh command fails
    """
    # Look for existing comment
    existing_comment_id = find_existing_comment(pr_number)

    if existing_comment_id and existing_comment_id != "None":
        update_existing_comment(existing_comment_id, comment_body)
    else:
        create_new_comment(pr_number, comment_body)

    print("Comment posted/updated successfully", file=sys.stderr)

def main() -> int:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Post or update PR comment with memory impact analysis"
    )
    parser.add_argument("--pr-number", required=True, help="PR number")
    parser.add_argument(
        "--components",
        required=True,
        help='JSON array of component names (e.g., \'["api", "wifi"]\')',
    )
    parser.add_argument("--platform", required=True, help="Platform name")
    parser.add_argument(
        "--target-ram", type=int, required=True, help="Target branch RAM usage"
    )
    parser.add_argument(
        "--target-flash", type=int, required=True, help="Target branch flash usage"
    )
    parser.add_argument("--pr-ram", type=int, required=True, help="PR branch RAM usage")
    parser.add_argument(
        "--pr-flash", type=int, required=True, help="PR branch flash usage"
    )
    parser.add_argument(
        "--target-json",
        help="Optional path to target branch analysis JSON (for detailed analysis)",
    )
    parser.add_argument(
        "--pr-json",
        help="Optional path to PR branch analysis JSON (for detailed analysis)",
    )
    parser.add_argument(
        "--target-cache-hit",
        action="store_true",
        help="Indicates that target branch analysis was loaded from cache",
    )

    args = parser.parse_args()

    # Parse components from JSON
    try:
        components = json.loads(args.components)
        if not isinstance(components, list):
            print("Error: --components must be a JSON array", file=sys.stderr)
            sys.exit(1)
    except json.JSONDecodeError as e:
        print(f"Error parsing --components JSON: {e}", file=sys.stderr)
        sys.exit(1)

    # Load analysis JSON files
    target_analysis = None
    pr_analysis = None
    target_symbols = None
    pr_symbols = None

    if args.target_json:
        target_data = load_analysis_json(args.target_json)
        if target_data and target_data.get("detailed_analysis"):
            target_analysis = target_data["detailed_analysis"].get("components")
            target_symbols = target_data["detailed_analysis"].get("symbols")

    if args.pr_json:
        pr_data = load_analysis_json(args.pr_json)
        if pr_data and pr_data.get("detailed_analysis"):
            pr_analysis = pr_data["detailed_analysis"].get("components")
            pr_symbols = pr_data["detailed_analysis"].get("symbols")

    # Create comment body
    # Note: Memory totals (RAM/Flash) are summed across all builds if multiple were run.
    comment_body = create_comment_body(
        components=components,
        platform=args.platform,
        target_ram=args.target_ram,
        target_flash=args.target_flash,
        pr_ram=args.pr_ram,
        pr_flash=args.pr_flash,
        target_analysis=target_analysis,
        pr_analysis=pr_analysis,
        target_symbols=target_symbols,
        pr_symbols=pr_symbols,
        target_cache_hit=args.target_cache_hit,
    )

    # Post or update comment
    post_or_update_comment(args.pr_number, comment_body)

    return 0


if __name__ == "__main__":
    sys.exit(main())
script/ci_memory_impact_extract.py (new executable file, 281 lines)
@@ -0,0 +1,281 @@
#!/usr/bin/env python3
"""Extract memory usage statistics from ESPHome build output.

This script parses the PlatformIO build output to extract RAM and flash
usage statistics for a compiled component. It's used by the CI workflow to
compare memory usage between branches.

The script reads compile output from stdin and looks for the standard
PlatformIO output format:
    RAM:   [====      ]  36.1% (used 29548 bytes from 81920 bytes)
    Flash: [===       ]  34.0% (used 348511 bytes from 1023984 bytes)

Optionally performs detailed memory analysis if a build directory is provided.
"""

from __future__ import annotations

import argparse
import json
from pathlib import Path
import re
import sys

# Add esphome to path
sys.path.insert(0, str(Path(__file__).parent.parent))

# pylint: disable=wrong-import-position
from esphome.analyze_memory import MemoryAnalyzer
from esphome.platformio_api import IDEData
from script.ci_helpers import write_github_output

# Regex patterns for extracting memory usage from PlatformIO output
_RAM_PATTERN = re.compile(r"RAM:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes")
_FLASH_PATTERN = re.compile(r"Flash:\s+\[.*?\]\s+\d+\.\d+%\s+\(used\s+(\d+)\s+bytes")
_BUILD_PATH_PATTERN = re.compile(r"Build path: (.+)")


def extract_from_compile_output(
    output_text: str,
) -> tuple[int | None, int | None, str | None]:
    """Extract memory usage and build directory from PlatformIO compile output.

    Supports multiple builds (for component groups or isolated components).
    When test_build_components.py creates multiple builds, this sums the
    memory usage across all builds.

    Looks for lines like:
        RAM:   [====      ]  36.1% (used 29548 bytes from 81920 bytes)
        Flash: [===       ]  34.0% (used 348511 bytes from 1023984 bytes)

    Also extracts build directory from lines like:
        INFO Compiling app... Build path: /path/to/build

    Args:
        output_text: Compile output text (may contain multiple builds)

    Returns:
        Tuple of (total_ram_bytes, total_flash_bytes, build_dir) or (None, None, None) if not found
    """
    # Find all RAM and Flash matches (may be multiple builds)
    ram_matches = _RAM_PATTERN.findall(output_text)
    flash_matches = _FLASH_PATTERN.findall(output_text)

    if not ram_matches or not flash_matches:
        return None, None, None

    # Sum all builds (handles multiple component groups)
    total_ram = sum(int(match) for match in ram_matches)
    total_flash = sum(int(match) for match in flash_matches)

    # Extract build directory from ESPHome's explicit build path output
    # Look for: INFO Compiling app... Build path: /path/to/build
    # Note: Multiple builds reuse the same build path (each overwrites the previous)
    build_dir = None
    if match := _BUILD_PATH_PATTERN.search(output_text):
        build_dir = match.group(1).strip()

    return total_ram, total_flash, build_dir

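A sanity check of the extraction on fabricated two-build compile output (all numbers invented; assumes the patterns and function above are in scope):

sample = """\
INFO Compiling app... Build path: /tmp/build/demo
RAM:   [====      ]  36.1% (used 29548 bytes from 81920 bytes)
Flash: [===       ]  34.0% (used 348511 bytes from 1023984 bytes)
RAM:   [===       ]  30.0% (used 24576 bytes from 81920 bytes)
Flash: [==        ]  20.0% (used 204797 bytes from 1023984 bytes)
"""
print(extract_from_compile_output(sample))
# -> (54124, 553308, '/tmp/build/demo')  # RAM and flash summed across both builds
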
def run_detailed_analysis(build_dir: str) -> dict | None:
    """Run detailed memory analysis on build directory.

    Args:
        build_dir: Path to ESPHome build directory

    Returns:
        Dictionary with analysis results or None if analysis fails
    """
    build_path = Path(build_dir)
    if not build_path.exists():
        print(f"Build directory not found: {build_dir}", file=sys.stderr)
        return None

    # Find firmware.elf
    elf_path = None
    for elf_candidate in [
        build_path / "firmware.elf",
        build_path / ".pioenvs" / build_path.name / "firmware.elf",
    ]:
        if elf_candidate.exists():
            elf_path = str(elf_candidate)
            break

    if not elf_path:
        print(f"firmware.elf not found in {build_dir}", file=sys.stderr)
        return None

    # Find idedata.json - check multiple locations
    device_name = build_path.name
    idedata_candidates = [
        # In .pioenvs for test builds
        build_path / ".pioenvs" / device_name / "idedata.json",
        # In .esphome/idedata for regular builds
        Path.home() / ".esphome" / "idedata" / f"{device_name}.json",
        # Check parent directories for .esphome/idedata (for test_build_components)
        build_path.parent.parent.parent / "idedata" / f"{device_name}.json",
    ]

    idedata = None
    for idedata_path in idedata_candidates:
        if not idedata_path.exists():
            continue
        try:
            with open(idedata_path, encoding="utf-8") as f:
                raw_data = json.load(f)
            idedata = IDEData(raw_data)
            print(f"Loaded idedata from: {idedata_path}", file=sys.stderr)
            break
        except (json.JSONDecodeError, OSError) as e:
            print(
                f"Warning: Failed to load idedata from {idedata_path}: {e}",
                file=sys.stderr,
            )

    analyzer = MemoryAnalyzer(elf_path, idedata=idedata)
    components = analyzer.analyze()

    # Convert to JSON-serializable format
    result = {
        "components": {
            name: {
                "text": mem.text_size,
                "rodata": mem.rodata_size,
                "data": mem.data_size,
                "bss": mem.bss_size,
                "flash_total": mem.flash_total,
                "ram_total": mem.ram_total,
                "symbol_count": mem.symbol_count,
            }
            for name, mem in components.items()
        },
        "symbols": {},
    }

    # Build symbol map
    for section in analyzer.sections.values():
        for symbol_name, size, _ in section.symbols:
            if size > 0:
                demangled = analyzer._demangle_symbol(symbol_name)
                result["symbols"][demangled] = size

    return result

def main() -> int:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Extract memory usage from ESPHome build output"
    )
    parser.add_argument(
        "--output-env",
        action="store_true",
        help="Output to GITHUB_OUTPUT environment file",
    )
    parser.add_argument(
        "--build-dir",
        help="Optional build directory for detailed memory analysis (overrides auto-detection)",
    )
    parser.add_argument(
        "--output-json",
        help="Optional path to save detailed analysis JSON",
    )
    parser.add_argument(
        "--output-build-dir",
        help="Optional path to write the detected build directory",
    )

    args = parser.parse_args()

    # Read compile output from stdin
    compile_output = sys.stdin.read()

    # Extract memory usage and build directory
    ram_bytes, flash_bytes, detected_build_dir = extract_from_compile_output(
        compile_output
    )

    if ram_bytes is None or flash_bytes is None:
        print("Failed to extract memory usage from compile output", file=sys.stderr)
        print("Expected lines like:", file=sys.stderr)
        print(
            "  RAM:   [====      ]  36.1% (used 29548 bytes from 81920 bytes)",
            file=sys.stderr,
        )
        print(
            "  Flash: [===       ]  34.0% (used 348511 bytes from 1023984 bytes)",
            file=sys.stderr,
        )
        return 1

    # Count how many builds were found
    num_builds = len(_RAM_PATTERN.findall(compile_output))

    if num_builds > 1:
        print(
            f"Found {num_builds} builds - summing memory usage across all builds",
            file=sys.stderr,
        )
        print(
            "WARNING: Detailed analysis will only cover the last build",
            file=sys.stderr,
        )

    print(f"Total RAM: {ram_bytes} bytes", file=sys.stderr)
    print(f"Total Flash: {flash_bytes} bytes", file=sys.stderr)

    # Determine which build directory to use (explicit arg overrides auto-detection)
    build_dir = args.build_dir or detected_build_dir

    if detected_build_dir:
        print(f"Detected build directory: {detected_build_dir}", file=sys.stderr)
        if num_builds > 1:
            print(
                f"  (using last of {num_builds} builds for detailed analysis)",
                file=sys.stderr,
            )

    # Write build directory to file if requested
    if args.output_build_dir and build_dir:
        build_dir_path = Path(args.output_build_dir)
        build_dir_path.parent.mkdir(parents=True, exist_ok=True)
        build_dir_path.write_text(build_dir)
        print(f"Wrote build directory to {args.output_build_dir}", file=sys.stderr)

    # Run detailed analysis if build directory available
    detailed_analysis = None
    if build_dir:
        print(f"Running detailed analysis on {build_dir}", file=sys.stderr)
        detailed_analysis = run_detailed_analysis(build_dir)

    # Save JSON output if requested
    if args.output_json:
        output_data = {
            "ram_bytes": ram_bytes,
            "flash_bytes": flash_bytes,
            "detailed_analysis": detailed_analysis,
        }

        output_path = Path(args.output_json)
        output_path.parent.mkdir(parents=True, exist_ok=True)
        with open(output_path, "w", encoding="utf-8") as f:
            json.dump(output_data, f, indent=2)
        print(f"Saved analysis to {args.output_json}", file=sys.stderr)

    if args.output_env:
        # Output to GitHub Actions
        write_github_output(
            {
                "ram_usage": ram_bytes,
                "flash_usage": flash_bytes,
            }
        )
    else:
        print(f"{ram_bytes},{flash_bytes}")

    return 0


if __name__ == "__main__":
    sys.exit(main())
@@ -10,7 +10,13 @@ what files have changed. It outputs JSON with the following structure:
    "clang_format": true/false,
    "python_linters": true/false,
    "changed_components": ["component1", "component2", ...],
-    "component_test_count": 5
+    "component_test_count": 5,
+    "memory_impact": {
+        "should_run": "true/false",
+        "components": ["component1", "component2", ...],
+        "platform": "esp32-idf",
+        "use_merged_config": "true"
+    }
}

The CI workflow uses this information to:
@@ -20,6 +26,7 @@ The CI workflow uses this information to:
- Skip or run Python linters (ruff, flake8, pylint, pyupgrade)
- Determine which components to test individually
- Decide how to split component tests (if there are many)
+- Run memory impact analysis whenever there are changed components (merged config), and also for core-only changes

Usage:
    python script/determine-jobs.py [-b BRANCH]
@@ -31,6 +38,8 @@ Options:

from __future__ import annotations

import argparse
+from collections import Counter
+from enum import StrEnum
from functools import cache
import json
import os
@@ -40,16 +49,47 @@ import sys
from typing import Any

from helpers import (
+    BASE_BUS_COMPONENTS,
    CPP_FILE_EXTENSIONS,
    ESPHOME_COMPONENTS_PATH,
    PYTHON_FILE_EXTENSIONS,
    changed_files,
    get_all_dependencies,
+    get_component_from_path,
+    get_component_test_files,
    get_components_from_integration_fixtures,
+    parse_test_filename,
    root_path,
)


class Platform(StrEnum):
    """Platform identifiers for memory impact analysis."""

    ESP8266_ARD = "esp8266-ard"
    ESP32_IDF = "esp32-idf"
    ESP32_C3_IDF = "esp32-c3-idf"
    ESP32_C6_IDF = "esp32-c6-idf"
    ESP32_S2_IDF = "esp32-s2-idf"
    ESP32_S3_IDF = "esp32-s3-idf"


# Memory impact analysis constants
MEMORY_IMPACT_FALLBACK_COMPONENT = "api"  # Representative component for core changes
MEMORY_IMPACT_FALLBACK_PLATFORM = Platform.ESP32_IDF  # Most representative platform

# Platform preference order for memory impact analysis
# Prefer newer platforms first as they represent the future of ESPHome
# ESP8266 is most constrained but many new features don't support it
MEMORY_IMPACT_PLATFORM_PREFERENCE = [
    Platform.ESP32_C6_IDF,  # ESP32-C6 IDF (newest, supports Thread/Zigbee)
    Platform.ESP8266_ARD,  # ESP8266 Arduino (most memory constrained - best for impact analysis)
    Platform.ESP32_IDF,  # ESP32 IDF platform (primary ESP32 platform, most representative)
    Platform.ESP32_C3_IDF,  # ESP32-C3 IDF
    Platform.ESP32_S2_IDF,  # ESP32-S2 IDF
    Platform.ESP32_S3_IDF,  # ESP32-S3 IDF
]


def should_run_integration_tests(branch: str | None = None) -> bool:
    """Determine if integration tests should run based on changed files.

@@ -105,12 +145,9 @@ def should_run_integration_tests(branch: str | None = None) -> bool:

    # Check if any required components changed
    for file in files:
-        if file.startswith(ESPHOME_COMPONENTS_PATH):
-            parts = file.split("/")
-            if len(parts) >= 3:
-                component = parts[2]
-                if component in all_required_components:
-                    return True
+        component = get_component_from_path(file)
+        if component and component in all_required_components:
+            return True

    return False

@@ -224,10 +261,136 @@ def _component_has_tests(component: str) -> bool:
    Returns:
        True if the component has test YAML files
    """
-    tests_dir = Path(root_path) / "tests" / "components" / component
-    if not tests_dir.exists():
-        return False
-    return any(tests_dir.glob("test.*.yaml"))
+    return bool(get_component_test_files(component))


def detect_memory_impact_config(
    branch: str | None = None,
) -> dict[str, Any]:
    """Determine memory impact analysis configuration.

    Always runs memory impact analysis when there are changed components,
    building a merged configuration with all changed components (like
    test_build_components.py does) to get comprehensive memory analysis.

    Args:
        branch: Branch to compare against

    Returns:
        Dictionary with memory impact analysis parameters:
        - should_run: "true" or "false"
        - components: list of component names to analyze
        - platform: platform name for the merged build
        - use_merged_config: "true" (always use merged config)
    """

    # Get actually changed files (not dependencies)
    files = changed_files(branch)

    # Find all changed components (excluding core and base bus components)
    changed_component_set: set[str] = set()
    has_core_changes = False

    for file in files:
        component = get_component_from_path(file)
        if component:
            # Skip base bus components as they're used across many builds
            if component not in BASE_BUS_COMPONENTS:
                changed_component_set.add(component)
        elif file.startswith("esphome/"):
            # Core ESPHome files changed (not component-specific)
            has_core_changes = True

    # If no components changed but core changed, test representative component
    force_fallback_platform = False
    if not changed_component_set and has_core_changes:
        print(
            f"Memory impact: No components changed, but core files changed. "
            f"Testing {MEMORY_IMPACT_FALLBACK_COMPONENT} component on {MEMORY_IMPACT_FALLBACK_PLATFORM}.",
            file=sys.stderr,
        )
        changed_component_set.add(MEMORY_IMPACT_FALLBACK_COMPONENT)
        force_fallback_platform = True  # Use fallback platform (most representative)
    elif not changed_component_set:
        # No components and no core changes
        return {"should_run": "false"}

    # Find components that have tests and collect their supported platforms
    components_with_tests: list[str] = []
    component_platforms_map: dict[
        str, set[Platform]
    ] = {}  # Track which platforms each component supports

    for component in sorted(changed_component_set):
        # Look for test files on preferred platforms
        test_files = get_component_test_files(component)
        if not test_files:
            continue

        # Check if component has tests for any preferred platform
        available_platforms = [
            platform
            for test_file in test_files
            if (platform := parse_test_filename(test_file)[1]) != "all"
            and platform in MEMORY_IMPACT_PLATFORM_PREFERENCE
        ]

        if not available_platforms:
            continue

        component_platforms_map[component] = set(available_platforms)
        components_with_tests.append(component)

    # If no components have tests, don't run memory impact
    if not components_with_tests:
        return {"should_run": "false"}

    # Find common platforms supported by ALL components
    # This ensures we can build all components together in a merged config
    common_platforms = set(MEMORY_IMPACT_PLATFORM_PREFERENCE)
    for component, platforms in component_platforms_map.items():
        common_platforms &= platforms

    # Select the most preferred platform from the common set
    # Exception: for core changes, use fallback platform (most representative of codebase)
    if force_fallback_platform:
        platform = MEMORY_IMPACT_FALLBACK_PLATFORM
    elif common_platforms:
        # Pick the most preferred platform that all components support
        platform = min(common_platforms, key=MEMORY_IMPACT_PLATFORM_PREFERENCE.index)
    else:
        # No common platform - pick the most commonly supported platform
        # This allows testing components individually even if they can't be merged
        # Count how many components support each platform
        platform_counts = Counter(
            p for platforms in component_platforms_map.values() for p in platforms
        )
        # Pick the platform supported by most components, preferring earlier in MEMORY_IMPACT_PLATFORM_PREFERENCE
        platform = max(
            platform_counts.keys(),
            key=lambda p: (
                platform_counts[p],
                -MEMORY_IMPACT_PLATFORM_PREFERENCE.index(p),
            ),
        )

    # Debug output
    print("Memory impact analysis:", file=sys.stderr)
    print(f"  Changed components: {sorted(changed_component_set)}", file=sys.stderr)
    print(f"  Components with tests: {components_with_tests}", file=sys.stderr)
    print(
        f"  Component platforms: {dict(sorted(component_platforms_map.items()))}",
        file=sys.stderr,
    )
    print(f"  Common platforms: {sorted(common_platforms)}", file=sys.stderr)
    print(f"  Selected platform: {platform}", file=sys.stderr)

    return {
        "should_run": "true",
        "components": components_with_tests,
        "platform": platform,
        "use_merged_config": "true",
    }

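A toy, self-contained run of the platform-selection rules above (the preference list is truncated and the component platform sets are invented):

from collections import Counter

PREFERENCE = ["esp32-c6-idf", "esp8266-ard", "esp32-idf"]  # truncated, for illustration
component_platforms_map = {
    "sensor_a": {"esp8266-ard", "esp32-idf"},
    "sensor_b": {"esp32-idf"},
}

common = set(PREFERENCE)
for platforms in component_platforms_map.values():
    common &= platforms

if common:
    # All components can build together: take the most preferred common platform.
    platform = min(common, key=PREFERENCE.index)
else:
    # No overlap: fall back to the platform most components support,
    # breaking ties by preference order.
    counts = Counter(p for ps in component_platforms_map.values() for p in ps)
    platform = max(counts, key=lambda p: (counts[p], -PREFERENCE.index(p)))

print(platform)  # -> "esp32-idf" (the only platform both toy components share)
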
def main() -> None:
@@ -279,6 +442,9 @@ def main() -> None:
        if component not in directly_changed_components
    ]

    # Detect components for memory impact analysis (merged config)
    memory_impact = detect_memory_impact_config(args.branch)

    # Build output
    output: dict[str, Any] = {
        "integration_tests": run_integration,
@@ -292,6 +458,7 @@ def main() -> None:
        "component_test_count": len(changed_components_with_tests),
        "directly_changed_count": len(directly_changed_with_tests),
        "dependency_only_count": len(dependency_only_components),
+        "memory_impact": memory_impact,
    }

    # Output as JSON

@@ -29,6 +29,18 @@ YAML_FILE_EXTENSIONS = (".yaml", ".yml")
# Component path prefix
ESPHOME_COMPONENTS_PATH = "esphome/components/"

# Base bus components - these ARE the bus implementations and should not
# be flagged as needing migration since they are the platform/base components
BASE_BUS_COMPONENTS = {
    "i2c",
    "spi",
    "uart",
    "modbus",
    "canbus",
    "remote_transmitter",
    "remote_receiver",
}


def parse_list_components_output(output: str) -> list[str]:
    """Parse the output from list-components.py script.
@@ -46,6 +58,65 @@ def parse_list_components_output(output: str) -> list[str]:
    return [c.strip() for c in output.strip().split("\n") if c.strip()]


def parse_test_filename(test_file: Path) -> tuple[str, str]:
    """Parse test filename to extract test name and platform.

    Test files follow the naming pattern: test.<platform>.yaml or test-<variant>.<platform>.yaml

    Args:
        test_file: Path to test file

    Returns:
        Tuple of (test_name, platform)
    """
    parts = test_file.stem.split(".")
    if len(parts) == 2:
        return parts[0], parts[1]  # test, platform
    return parts[0], "all"

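Examples (filenames invented; assumes parse_test_filename from this file is in scope):

from pathlib import Path

print(parse_test_filename(Path("test.esp32-idf.yaml")))           # ('test', 'esp32-idf')
print(parse_test_filename(Path("test-energy.esp8266-ard.yaml")))  # ('test-energy', 'esp8266-ard')
print(parse_test_filename(Path("test.yaml")))                     # ('test', 'all')
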
def get_component_from_path(file_path: str) -> str | None:
    """Extract component name from a file path.

    Args:
        file_path: Path to a file (e.g., "esphome/components/wifi/wifi.cpp")

    Returns:
        Component name if path is in components directory, None otherwise
    """
    if not file_path.startswith(ESPHOME_COMPONENTS_PATH):
        return None
    parts = file_path.split("/")
    if len(parts) >= 3:
        return parts[2]
    return None


def get_component_test_files(
    component: str, *, all_variants: bool = False
) -> list[Path]:
    """Get test files for a component.

    Args:
        component: Component name (e.g., "wifi")
        all_variants: If True, returns all test files including variants (test-*.yaml).
            If False, returns only base test files (test.*.yaml).
            Default is False.

    Returns:
        List of test file paths for the component, or empty list if none exist
    """
    tests_dir = Path(root_path) / "tests" / "components" / component
    if not tests_dir.exists():
        return []

    if all_variants:
        # Match both test.*.yaml and test-*.yaml patterns
        return list(tests_dir.glob("test[.-]*.yaml"))
    # Match only test.*.yaml (base tests)
    return list(tests_dir.glob("test.*.yaml"))

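The two glob patterns differ only in the character after "test": the [.-] character class covers both base tests and variants. fnmatch uses the same syntax as Path.glob, so a quick demo on invented filenames:

from fnmatch import fnmatch

names = ["test.esp32-idf.yaml", "test-energy.esp32-idf.yaml", "common.yaml"]
print([n for n in names if fnmatch(n, "test[.-]*.yaml")])
# -> ['test.esp32-idf.yaml', 'test-energy.esp32-idf.yaml']
print([n for n in names if fnmatch(n, "test.*.yaml")])
# -> ['test.esp32-idf.yaml']
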
def styled(color: str | tuple[str, ...], msg: str, reset: bool = True) -> str:
    prefix = "".join(color) if isinstance(color, tuple) else color
    suffix = colorama.Style.RESET_ALL if reset else ""
@@ -314,11 +385,9 @@ def _filter_changed_ci(files: list[str]) -> list[str]:
    # because changes in one file can affect other files in the same component.
    filtered_files = []
    for f in files:
-        if f.startswith(ESPHOME_COMPONENTS_PATH):
-            # Check if file belongs to any of the changed components
-            parts = f.split("/")
-            if len(parts) >= 3 and parts[2] in component_set:
-                filtered_files.append(f)
+        component = get_component_from_path(f)
+        if component and component in component_set:
+            filtered_files.append(f)

    return filtered_files

@@ -4,7 +4,7 @@ from collections.abc import Callable
from pathlib import Path
import sys

-from helpers import changed_files, git_ls_files
+from helpers import changed_files, get_component_from_path, git_ls_files

from esphome.const import (
    KEY_CORE,
@@ -30,11 +30,9 @@ def get_all_component_files() -> list[str]:
def extract_component_names_array_from_files_array(files):
    components = []
    for file in files:
-        file_parts = file.split("/")
-        if len(file_parts) >= 4:
-            component_name = file_parts[2]
-            if component_name not in components:
-                components.append(component_name)
+        component_name = get_component_from_path(file)
+        if component_name and component_name not in components:
+            components.append(component_name)
    return components

@@ -185,17 +183,20 @@ def main():
        "-c",
        "--changed",
        action="store_true",
-        help="List all components required for testing based on changes (includes dependencies)",
+        help="List all components with dependencies (used by clang-tidy). "
+        "When base test infrastructure changes, returns ALL components.",
    )
    parser.add_argument(
        "--changed-direct",
        action="store_true",
-        help="List only directly changed components (without dependencies)",
+        help="List only directly changed components, ignoring infrastructure changes "
+        "(used by CI for isolation decisions)",
    )
    parser.add_argument(
        "--changed-with-deps",
        action="store_true",
-        help="Output JSON with both directly changed and all changed components",
+        help="Output JSON with both directly changed and all changed components "
+        "(with dependencies), ignoring infrastructure changes (used by CI for test determination)",
    )
    parser.add_argument(
        "-b", "--branch", help="Branch to compare changed files against"
@@ -213,12 +214,34 @@ def main():
        # When --changed* is passed, only get the changed files
        changed = changed_files(args.branch)

-        # If any base test file(s) changed, there's no need to filter out components
-        if any("tests/test_build_components" in file for file in changed):
-            # Need to get all component files
+        # If any base test file(s) changed, we need to check all components
+        # BUT only for --changed (used by clang-tidy for comprehensive checking)
+        # NOT for --changed-direct or --changed-with-deps (used by CI for targeted testing)
+        #
+        # Flag usage:
+        # - --changed: Used by clang-tidy (script/helpers.py get_changed_components)
+        #   Returns: All components with dependencies when base test files change
+        #   Reason: Test infrastructure changes may affect any component
+        #
+        # - --changed-direct: Used by CI isolation (script/determine-jobs.py)
+        #   Returns: Only components with actual code changes (not infrastructure)
+        #   Reason: Only directly changed components need isolated testing
+        #
+        # - --changed-with-deps: Used by CI test determination (script/determine-jobs.py)
+        #   Returns: Components with code changes + their dependencies (not infrastructure)
+        #   Reason: CI needs to test changed components and their dependents
+        base_test_changed = any(
+            "tests/test_build_components" in file for file in changed
+        )
+
+        if base_test_changed and not args.changed_direct and not args.changed_with_deps:
+            # Base test infrastructure changed - load all component files
+            # This is for --changed (clang-tidy) which needs comprehensive checking
            files = get_all_component_files()
        else:
-            # Only look at changed component files
+            # Only look at changed component files (ignore infrastructure changes)
+            # For --changed-direct: only actual component code changes matter (for isolation)
+            # For --changed-with-deps: only actual component code changes matter (for testing)
            files = [f for f in changed if filter_component_files(f)]
    else:
        # Get all component files

@@ -16,6 +16,7 @@ The merger handles:
from __future__ import annotations

import argparse
from functools import lru_cache
from pathlib import Path
import re
import sys
@@ -28,6 +29,10 @@ from esphome import yaml_util
from esphome.config_helpers import merge_config
from script.analyze_component_buses import PACKAGE_DEPENDENCIES, get_common_bus_packages

# Prefix for dependency markers in package tracking
# Used to mark packages that are included transitively (e.g., uart via modbus)
DEPENDENCY_MARKER_PREFIX = "_dep_"


def load_yaml_file(yaml_file: Path) -> dict:
    """Load YAML file using ESPHome's YAML loader.
@@ -44,6 +49,34 @@ def load_yaml_file(yaml_file: Path) -> dict:
    return yaml_util.load_yaml(yaml_file)


@lru_cache(maxsize=256)
def get_component_packages(
    component_name: str, platform: str, tests_dir_str: str
) -> dict:
    """Get packages dict from a component's test file with caching.

    This function is cached to avoid re-loading and re-parsing the same file
    multiple times when extracting packages during cross-bus merging.

    Args:
        component_name: Name of the component
        platform: Platform name (e.g., "esp32-idf")
        tests_dir_str: String path to tests/components directory (must be string for cache hashability)

    Returns:
        Dictionary with 'packages' key containing the raw packages dict from the YAML,
        or empty dict if no packages section exists
    """
    tests_dir = Path(tests_dir_str)
    test_file = tests_dir / component_name / f"test.{platform}.yaml"
    comp_data = load_yaml_file(test_file)

    if "packages" not in comp_data or not isinstance(comp_data["packages"], dict):
        return {}

    return comp_data["packages"]

def extract_packages_from_yaml(data: dict) -> dict[str, str]:
    """Extract COMMON BUS package includes from parsed YAML.

@@ -82,7 +115,7 @@ def extract_packages_from_yaml(data: dict) -> dict[str, str]:
            if dep not in common_bus_packages:
                continue
            # Mark as included via dependency
-            packages[f"_dep_{dep}"] = f"(included via {name})"
+            packages[f"{DEPENDENCY_MARKER_PREFIX}{dep}"] = f"(included via {name})"

    return packages

@@ -195,6 +228,9 @@ def merge_component_configs(
|
||||
# Start with empty config
|
||||
merged_config_data = {}
|
||||
|
||||
# Convert tests_dir to string for caching
|
||||
tests_dir_str = str(tests_dir)
|
||||
|
||||
# Process each component
|
||||
for comp_name in component_names:
|
||||
comp_dir = tests_dir / comp_name
|
||||
@@ -206,26 +242,29 @@ def merge_component_configs(
|
||||
# Load the component's test file
|
||||
comp_data = load_yaml_file(test_file)
|
||||
|
||||
# Validate packages are compatible
|
||||
# Components with no packages (no_buses) can merge with any group
|
||||
# Merge packages from all components (cross-bus merging)
|
||||
# Components can have different packages (e.g., one with ble, another with uart)
|
||||
# as long as they don't conflict (checked by are_buses_compatible before calling this)
|
||||
comp_packages = extract_packages_from_yaml(comp_data)
|
||||
|
||||
if all_packages is None:
|
||||
# First component - set the baseline
|
||||
all_packages = comp_packages
|
||||
elif not comp_packages:
|
||||
# This component has no packages (no_buses) - it can merge with any group
|
||||
pass
|
||||
elif not all_packages:
|
||||
# Previous components had no packages, but this one does - adopt these packages
|
||||
all_packages = comp_packages
|
||||
elif comp_packages != all_packages:
|
||||
# Both have packages but they differ - this is an error
|
||||
raise ValueError(
|
||||
f"Component {comp_name} has different packages than previous components. "
|
||||
f"Expected: {all_packages}, Got: {comp_packages}. "
|
||||
f"All components must use the same common bus configs to be merged."
|
||||
)
|
||||
# First component - initialize package dict
|
||||
all_packages = comp_packages if comp_packages else {}
|
||||
elif comp_packages:
|
||||
# Merge packages - combine all unique package types
|
||||
# If both have the same package type, verify they're identical
|
||||
for pkg_name, pkg_config in comp_packages.items():
|
||||
if pkg_name in all_packages:
|
||||
# Same package type - verify config matches
|
||||
if all_packages[pkg_name] != pkg_config:
|
||||
raise ValueError(
|
||||
f"Component {comp_name} has conflicting config for package '{pkg_name}'. "
|
||||
f"Expected: {all_packages[pkg_name]}, Got: {pkg_config}. "
|
||||
f"Components with conflicting bus configs cannot be merged."
|
||||
)
|
||||
else:
|
||||
# New package type - add it
|
||||
all_packages[pkg_name] = pkg_config
|
||||
|
||||
# Handle $component_dir by replacing with absolute path
|
||||
# This allows components that use local file references to be grouped
|
||||
@@ -287,26 +326,51 @@ def merge_component_configs(
|
||||
# merge_config handles list merging with ID-based deduplication automatically
|
||||
merged_config_data = merge_config(merged_config_data, comp_data)
|
||||
|
||||
# Add packages back (only once, since they're identical)
|
||||
# IMPORTANT: Only re-add common bus packages (spi, i2c, uart, etc.)
|
||||
# Add merged packages back (union of all component packages)
|
||||
# IMPORTANT: Only include common bus packages (spi, i2c, uart, etc.)
|
||||
# Do NOT re-add component-specific packages as they contain unprefixed $component_dir refs
|
||||
if all_packages:
|
||||
first_comp_data = load_yaml_file(
|
||||
tests_dir / component_names[0] / f"test.{platform}.yaml"
|
||||
)
|
||||
if "packages" in first_comp_data and isinstance(
|
||||
first_comp_data["packages"], dict
|
||||
):
|
||||
# Filter to only include common bus packages
|
||||
# Only dict format can contain common bus packages
|
||||
common_bus_packages = get_common_bus_packages()
|
||||
filtered_packages = {
|
||||
name: value
|
||||
for name, value in first_comp_data["packages"].items()
|
||||
if name in common_bus_packages
|
||||
}
|
||||
if filtered_packages:
|
||||
merged_config_data["packages"] = filtered_packages
|
||||
# Build packages dict from merged all_packages
|
||||
# all_packages is a dict mapping package_name -> str(package_value)
|
||||
# We need to reconstruct the actual package values by loading them from any component
|
||||
# Since packages with the same name must have identical configs (verified above),
|
||||
# we can load the package value from the first component that has each package
|
||||
common_bus_packages = get_common_bus_packages()
|
||||
merged_packages: dict[str, Any] = {}
|
||||
|
||||
# Collect packages that are included as dependencies
|
||||
# If modbus is present, uart is included via modbus.packages.uart
|
||||
packages_to_skip: set[str] = set()
|
||||
for pkg_name in all_packages:
|
||||
if pkg_name.startswith(DEPENDENCY_MARKER_PREFIX):
|
||||
# Extract the actual package name (remove _dep_ prefix)
|
||||
dep_name = pkg_name[len(DEPENDENCY_MARKER_PREFIX) :]
|
||||
packages_to_skip.add(dep_name)
|
||||
|
||||
for pkg_name in all_packages:
|
||||
# Skip dependency markers
|
||||
if pkg_name.startswith(DEPENDENCY_MARKER_PREFIX):
|
||||
continue
|
||||
# Skip non-common-bus packages
|
||||
if pkg_name not in common_bus_packages:
|
||||
continue
|
||||
# Skip packages that are included as dependencies of other packages
|
||||
# This prevents duplicate definitions (e.g., uart via modbus + uart separately)
|
||||
if pkg_name in packages_to_skip:
|
||||
continue
|
||||
|
||||
# Find a component that has this package and extract its value
|
||||
# Uses cached lookup to avoid re-loading the same files
|
||||
for comp_name in component_names:
|
||||
comp_packages = get_component_packages(
|
||||
comp_name, platform, tests_dir_str
|
||||
)
|
||||
if pkg_name in comp_packages:
|
||||
merged_packages[pkg_name] = comp_packages[pkg_name]
|
||||
break
|
||||
|
||||
if merged_packages:
|
||||
merged_config_data["packages"] = merged_packages
|
||||
|
||||
# Deduplicate items with same ID (keeps first occurrence)
|
||||
merged_config_data = deduplicate_by_id(merged_config_data)
|
||||
|
||||
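The rewritten merge logic above replaces the old all-or-nothing package comparison with a per-package union plus `_dep_` dependency markers. A minimal, self-contained sketch of those rules — package values are plain strings here for illustration; the real script resolves them from each component's test YAML via `get_component_packages`:

```python
# Sketch only - mirrors the merge rules above, assuming DEPENDENCY_MARKER_PREFIX = "_dep_".
DEPENDENCY_MARKER_PREFIX = "_dep_"


def merge_packages(per_component: list[dict[str, str]]) -> dict[str, str]:
    """Union package dicts, rejecting conflicts and dropping dependency-provided buses."""
    merged: dict[str, str] = {}
    for packages in per_component:
        for name, value in packages.items():
            if name in merged and merged[name] != value:
                raise ValueError(f"conflicting config for package '{name}'")
            merged[name] = value
    # Buses already pulled in via another package (e.g. uart via modbus) are skipped
    skip = {
        name[len(DEPENDENCY_MARKER_PREFIX) :]
        for name in merged
        if name.startswith(DEPENDENCY_MARKER_PREFIX)
    }
    return {
        name: value
        for name, value in merged.items()
        if not name.startswith(DEPENDENCY_MARKER_PREFIX) and name not in skip
    }


# modbus pulls in uart, so the standalone uart package is dropped from the merge:
print(merge_packages([
    {"modbus": "<modbus pkg>", "_dep_uart": "(included via modbus)"},
    {"uart": "<uart pkg>", "i2c": "<i2c pkg>"},
]))  # {'modbus': '<modbus pkg>', 'i2c': '<i2c pkg>'}
```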
@@ -22,10 +22,13 @@ sys.path.insert(0, str(Path(__file__).parent.parent))

 from script.analyze_component_buses import (
     ISOLATED_COMPONENTS,
+    ISOLATED_SIGNATURE_PREFIX,
     NO_BUSES_SIGNATURE,
     analyze_all_components,
     create_grouping_signature,
+    merge_compatible_bus_groups,
 )
+from script.helpers import get_component_test_files

 # Weighting for batch creation
 # Isolated components can't be grouped/merged, so they count as 10x

@@ -33,23 +36,22 @@ from script.analyze_component_buses import (
 ISOLATED_WEIGHT = 10
 GROUPABLE_WEIGHT = 1

+# Platform used for batching (platform-agnostic batching)
+# Batches are split across CI runners and each runner tests all platforms
+ALL_PLATFORMS = "all"
+

 def has_test_files(component_name: str, tests_dir: Path) -> bool:
     """Check if a component has test files.

     Args:
         component_name: Name of the component
-        tests_dir: Path to tests/components directory
+        tests_dir: Path to tests/components directory (unused, kept for compatibility)

     Returns:
         True if the component has test.*.yaml files
     """
-    component_dir = tests_dir / component_name
-    if not component_dir.exists() or not component_dir.is_dir():
-        return False
-
-    # Check for test.*.yaml files
-    return any(component_dir.glob("test.*.yaml"))
+    return bool(get_component_test_files(component_name))


 def create_intelligent_batches(

@@ -57,7 +59,7 @@ def create_intelligent_batches(
     tests_dir: Path,
     batch_size: int = 40,
     directly_changed: set[str] | None = None,
-) -> list[list[str]]:
+) -> tuple[list[list[str]], dict[tuple[str, str], list[str]]]:
     """Create batches optimized for component grouping.

     Args:

@@ -67,7 +69,9 @@ def create_intelligent_batches(
         directly_changed: Set of directly changed components (for logging only)

     Returns:
-        List of component batches (lists of component names)
+        Tuple of (batches, signature_groups) where:
+        - batches: List of component batches (lists of component names)
+        - signature_groups: Dict mapping (platform, signature) to component lists
     """
     # Filter out components without test files
     # Platform components like 'climate' and 'climate_ir' don't have test files

@@ -91,8 +95,9 @@ def create_intelligent_batches(

     # Group components by their bus signature ONLY (ignore platform)
     # All platforms will be tested by test_build_components.py for each batch
-    # Key: signature, Value: list of components
-    signature_groups: dict[str, list[str]] = defaultdict(list)
+    # Key: (platform, signature), Value: list of components
+    # We use ALL_PLATFORMS since batching is platform-agnostic
+    signature_groups: dict[tuple[str, str], list[str]] = defaultdict(list)

     for component in components_with_tests:
         # Components that can't be grouped get unique signatures

@@ -107,7 +112,9 @@ def create_intelligent_batches(
             or (directly_changed and component in directly_changed)
         )
         if is_isolated:
-            signature_groups[f"isolated_{component}"].append(component)
+            signature_groups[
+                (ALL_PLATFORMS, f"{ISOLATED_SIGNATURE_PREFIX}{component}")
+            ].append(component)
             continue

         # Get signature from any platform (they should all have the same buses)

@@ -117,11 +124,17 @@ def create_intelligent_batches(
             if buses:
                 signature = create_grouping_signature({platform: buses}, platform)
-                # Group by signature only - platform doesn't matter for batching
-                signature_groups[signature].append(component)
+                # Use ALL_PLATFORMS since we're batching across all platforms
+                signature_groups[(ALL_PLATFORMS, signature)].append(component)
                 break  # Only use first platform for grouping
         else:
             # No buses found for any platform - can be grouped together
-            signature_groups[NO_BUSES_SIGNATURE].append(component)
+            signature_groups[(ALL_PLATFORMS, NO_BUSES_SIGNATURE)].append(component)

+    # Merge compatible bus groups (cross-bus optimization)
+    # This allows components with different buses (ble + uart) to be batched together
+    # improving the efficiency of test_build_components.py grouping
+    signature_groups = merge_compatible_bus_groups(signature_groups)
+
     # Create batches by keeping signature groups together
     # Components with the same signature stay in the same batches

@@ -132,8 +145,8 @@ def create_intelligent_batches(
     # 2. Sort groupable signatures by size (largest first)
     # 3. "no_buses" components CAN be grouped together
     def sort_key(item):
-        signature, components = item
-        is_isolated = signature.startswith("isolated_")
+        (_platform, signature), components = item
+        is_isolated = signature.startswith(ISOLATED_SIGNATURE_PREFIX)
         # Put "isolated_*" last (1), groupable first (0)
         # Within each category, sort by size (largest first)
         return (is_isolated, -len(components))

@@ -149,8 +162,8 @@ def create_intelligent_batches(
     current_batch = []
     current_weight = 0

-    for signature, group_components in sorted_groups:
-        is_isolated = signature.startswith("isolated_")
+    for (_platform, signature), group_components in sorted_groups:
+        is_isolated = signature.startswith(ISOLATED_SIGNATURE_PREFIX)
         weight_per_component = ISOLATED_WEIGHT if is_isolated else GROUPABLE_WEIGHT

         for component in group_components:

@@ -169,7 +182,7 @@ def create_intelligent_batches(
     if current_batch:
         batches.append(current_batch)

-    return batches
+    return batches, signature_groups


 def main() -> int:

@@ -231,7 +244,7 @@ def main() -> int:
         return 1

     # Create intelligent batches
-    batches = create_intelligent_batches(
+    batches, signature_groups = create_intelligent_batches(
         components=components,
         tests_dir=args.tests_dir,
         batch_size=args.batch_size,

@@ -256,6 +269,58 @@ def main() -> int:
     # Re-analyze to get isolated component counts for summary
     _, non_groupable, _ = analyze_all_components(args.tests_dir)

+    # Show grouping details
+    print("\n=== Component Grouping Details ===", file=sys.stderr)
+    # Sort groups by signature for readability
+    groupable_groups = []
+    isolated_groups = []
+    for (platform, signature), group_comps in sorted(signature_groups.items()):
+        if signature.startswith(ISOLATED_SIGNATURE_PREFIX):
+            isolated_groups.append((signature, group_comps))
+        else:
+            groupable_groups.append((signature, group_comps))
+
+    if groupable_groups:
+        print(
+            f"\nGroupable signatures ({len(groupable_groups)} merged groups after cross-bus optimization):",
+            file=sys.stderr,
+        )
+        for signature, group_comps in sorted(
+            groupable_groups, key=lambda x: (-len(x[1]), x[0])
+        ):
+            # Check if this is a merged signature (contains +)
+            is_merged = "+" in signature and signature != NO_BUSES_SIGNATURE
+            # Special handling for no_buses components
+            if signature == NO_BUSES_SIGNATURE:
+                print(
+                    f"  [{signature}]: {len(group_comps)} components (used as fillers across batches)",
+                    file=sys.stderr,
+                )
+            else:
+                merge_indicator = " [MERGED]" if is_merged else ""
+                print(
+                    f"  [{signature}]{merge_indicator}: {len(group_comps)} components",
+                    file=sys.stderr,
+                )
+            # Show first few components as examples
+            examples = ", ".join(sorted(group_comps)[:8])
+            if len(group_comps) > 8:
+                examples += f", ... (+{len(group_comps) - 8} more)"
+            print(f"    → {examples}", file=sys.stderr)
+
+    if isolated_groups:
+        print(
+            f"\nIsolated components ({len(isolated_groups)} components - tested individually):",
+            file=sys.stderr,
+        )
+        isolated_names = sorted(
+            [comp for _, comps in isolated_groups for comp in comps]
+        )
+        # Group isolated components for compact display
+        for i in range(0, len(isolated_names), 10):
+            chunk = isolated_names[i : i + 10]
+            print(f"  {', '.join(chunk)}", file=sys.stderr)
+
     # Count isolated vs groupable components
     all_batched_components = [comp for batch in batches for comp in batch]
     isolated_count = sum(
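The hunks above switch batching to `(platform, signature)` keys and make `create_intelligent_batches` return the signature groups alongside the batches. A toy illustration of how those keys and the 10:1 weighting interact when packing batches — component names and the batch size are made up:

```python
# Illustration only: signatures mirror the script above (ALL_PLATFORMS = "all",
# isolated components prefixed with "isolated_"); the packing is the weighted
# fill described in the script, with a small made-up batch size.
ISOLATED_WEIGHT, GROUPABLE_WEIGHT, BATCH_SIZE = 10, 1, 12

signature_groups = {
    ("all", "uart"): ["modbus", "pzem004t"],
    ("all", "no_buses"): ["template", "time"],
    ("all", "isolated_api"): ["api"],  # ISOLATED_SIGNATURE_PREFIX + name
}

batches, current, weight = [], [], 0
for (platform, signature), comps in sorted(
    signature_groups.items(),
    key=lambda kv: (kv[0][1].startswith("isolated_"), -len(kv[1])),
):
    per_comp = ISOLATED_WEIGHT if signature.startswith("isolated_") else GROUPABLE_WEIGHT
    for comp in comps:
        if weight + per_comp > BATCH_SIZE and current:
            batches.append(current)
            current, weight = [], 0
        current.append(comp)
        weight += per_comp
if current:
    batches.append(current)
print(batches)  # groupable components packed first, isolated ones last
```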
script/templates/ci_memory_impact_comment_template.j2 (new file, 27 lines)

@@ -0,0 +1,27 @@
+{{ comment_marker }}
+## Memory Impact Analysis
+
+**Components:** {{ components_str }}
+**Platform:** `{{ platform }}`
+
+| Metric | Target Branch | This PR | Change |
+|--------|--------------|---------|--------|
+| **RAM** | {{ target_ram }} | {{ pr_ram }} | {{ ram_change }} |
+| **Flash** | {{ target_flash }} | {{ pr_flash }} | {{ flash_change }} |
+{% if component_breakdown %}
+{{ component_breakdown }}
+{% endif %}
+{% if symbol_changes %}
+{{ symbol_changes }}
+{% endif %}
+{%- if target_cache_hit %}
+
+> ⚡ Target branch analysis was loaded from cache (build skipped for faster CI).
+{%- endif %}
+
+---
+> **Note:** This analysis measures **static RAM and Flash usage** only (compile-time allocation).
+> **Dynamic memory (heap)** cannot be measured automatically.
+> **⚠️ You must test this PR on a real device** to measure free heap and ensure no runtime memory issues.
+
+*This analysis runs automatically when components change. Memory usage is measured from {{ config_note }}.*
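These templates are plain Jinja2, so the calling script presumably renders them with a standard `Environment`. A hedged sketch of that wiring — the `format_bytes` filter and `format_change` global referenced by the companion templates below are stubbed here, and all values are invented:

```python
# Sketch only: how a script might render the comment template above.
# The filter/global definitions and all rendered values are assumptions.
from jinja2 import Environment, FileSystemLoader


def format_bytes(n: int) -> str:
    return f"{n:,} bytes"


def format_change(before: int, after: int, threshold=None) -> str:
    return f"{after - before:+,} bytes"


env = Environment(loader=FileSystemLoader("script/templates"))
env.filters["format_bytes"] = format_bytes
env.globals["format_change"] = format_change

comment = env.get_template("ci_memory_impact_comment_template.j2").render(
    comment_marker="<!-- memory-impact -->",
    components_str="`uart`, `modbus`",
    platform="esp32-ard",
    target_ram="45,312 bytes", pr_ram="45,376 bytes", ram_change="+64 bytes",
    target_flash="512,064 bytes", pr_flash="512,480 bytes", flash_change="+416 bytes",
    component_breakdown="", symbol_changes="", target_cache_hit=False,
    config_note="a merged test configuration",
)
print(comment)
```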
script/templates/ci_memory_impact_component_breakdown.j2 (new file, 15 lines)

@@ -0,0 +1,15 @@
+
+<details open>
+<summary>📊 Component Memory Breakdown</summary>
+
+| Component | Target Flash | PR Flash | Change |
+|-----------|--------------|----------|--------|
+{% for comp, target_flash, pr_flash, delta in changed_components[:max_rows] -%}
+{% set threshold = component_change_threshold if comp.startswith("[esphome]") else none -%}
+| `{{ comp }}` | {{ target_flash|format_bytes }} | {{ pr_flash|format_bytes }} | {{ format_change(target_flash, pr_flash, threshold=threshold) }} |
+{% endfor -%}
+{% if changed_components|length > max_rows -%}
+| ... | ... | ... | *({{ changed_components|length - max_rows }} more components not shown)* |
+{% endif -%}
+
+</details>

script/templates/ci_memory_impact_macros.j2 (new file, 8 lines)

@@ -0,0 +1,8 @@
+{#- Macro for formatting symbol names in tables -#}
+{%- macro format_symbol(symbol, max_length, truncate_length) -%}
+{%- if symbol|length <= max_length -%}
+`{{ symbol }}`
+{%- else -%}
+<details><summary><code>{{ symbol[:truncate_length] }}...</code></summary><code>{{ symbol }}</code></details>
+{%- endif -%}
+{%- endmacro -%}
script/templates/ci_memory_impact_symbol_changes.j2 (new file, 51 lines)

@@ -0,0 +1,51 @@
+{%- from 'ci_memory_impact_macros.j2' import format_symbol -%}
+
+<details>
+<summary>🔍 Symbol-Level Changes (click to expand)</summary>
+
+{% if changed_symbols %}
+
+### Changed Symbols
+
+| Symbol | Target Size | PR Size | Change |
+|--------|-------------|---------|--------|
+{% for symbol, target_size, pr_size, delta in changed_symbols[:max_changed_rows] -%}
+| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ target_size|format_bytes }} | {{ pr_size|format_bytes }} | {{ format_change(target_size, pr_size) }} |
+{% endfor -%}
+{% if changed_symbols|length > max_changed_rows -%}
+| ... | ... | ... | *({{ changed_symbols|length - max_changed_rows }} more changed symbols not shown)* |
+{% endif -%}
+
+{% endif %}
+{% if new_symbols %}
+
+### New Symbols (top {{ max_new_rows }})
+
+| Symbol | Size |
+|--------|------|
+{% for symbol, size in new_symbols[:max_new_rows] -%}
+| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ size|format_bytes }} |
+{% endfor -%}
+{% if new_symbols|length > max_new_rows -%}
+{% set total_new_size = new_symbols|sum(attribute=1) -%}
+| *{{ new_symbols|length - max_new_rows }} more new symbols...* | *Total: {{ total_new_size|format_bytes }}* |
+{% endif -%}
+
+{% endif %}
+{% if removed_symbols %}
+
+### Removed Symbols (top {{ max_removed_rows }})
+
+| Symbol | Size |
+|--------|------|
+{% for symbol, size in removed_symbols[:max_removed_rows] -%}
+| {{ format_symbol(symbol, symbol_max_length, symbol_truncate_length) }} | {{ size|format_bytes }} |
+{% endfor -%}
+{% if removed_symbols|length > max_removed_rows -%}
+{% set total_removed_size = removed_symbols|sum(attribute=1) -%}
+| *{{ removed_symbols|length - max_removed_rows }} more removed symbols...* | *Total: {{ total_removed_size|format_bytes }}* |
+{% endif -%}
+
+{% endif %}
+
+</details>
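For context, the three table sections above consume `changed_symbols` (4-tuples), plus `new_symbols` and `removed_symbols` (2-tuples, which is what `sum(attribute=1)` relies on). A small sketch of how such lists could be derived from two `{symbol: size}` maps — the real analysis pipeline is not part of this diff:

```python
# Sketch of the template inputs, computed from hypothetical symbol-size maps.
def diff_symbols(target: dict[str, int], pr: dict[str, int]):
    changed = [(s, target[s], pr[s], pr[s] - target[s])
               for s in target.keys() & pr.keys() if target[s] != pr[s]]
    new = [(s, pr[s]) for s in pr.keys() - target.keys()]
    removed = [(s, target[s]) for s in target.keys() - pr.keys()]
    # Largest absolute change / size first, matching the "top N" table rows
    changed.sort(key=lambda t: abs(t[3]), reverse=True)
    new.sort(key=lambda t: t[1], reverse=True)
    removed.sort(key=lambda t: t[1], reverse=True)
    return changed, new, removed


changed, new, removed = diff_symbols(
    {"loop_task": 1200, "old_helper": 96},
    {"loop_task": 1264, "uart_rx_buffer": 512},
)
print(changed)  # [('loop_task', 1200, 1264, 64)]
print(new)      # [('uart_rx_buffer', 512)]
print(removed)  # [('old_helper', 96)]
```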
@@ -17,11 +17,13 @@ from __future__ import annotations

 import argparse
 from collections import defaultdict
+from dataclasses import dataclass
 import hashlib
 import os
 from pathlib import Path
 import subprocess
 import sys
+import time

 # Add esphome to path
 sys.path.insert(0, str(Path(__file__).parent.parent))

@@ -34,42 +36,61 @@ from script.analyze_component_buses import (
     analyze_all_components,
     create_grouping_signature,
     is_platform_component,
+    merge_compatible_bus_groups,
     uses_local_file_references,
 )
+from script.helpers import get_component_test_files
 from script.merge_component_configs import merge_component_configs

 # Platform-specific maximum group sizes
 # ESP8266 has limited IRAM and can't handle large component groups
 PLATFORM_MAX_GROUP_SIZE = {
     "esp8266-ard": 10,  # ESP8266 Arduino has limited IRAM
+    "esp8266-idf": 10,  # ESP8266 IDF also has limited IRAM
+    # BK72xx now uses BK7252 board (1.62MB flash vs 1.03MB) - no limit needed
     # Other platforms can handle larger groups
 }

+
+@dataclass
+class TestResult:
+    """Store information about a single test run."""
+
+    test_id: str
+    components: list[str]
+    platform: str
+    success: bool
+    duration: float
+    command: str = ""
+    test_type: str = "compile"  # "config" or "compile"
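An illustrative note on the new dataclass: the rest of the script builds one `TestResult` per test invocation and derives all summaries from the list. For example (IDs and durations invented):

```python
# Hypothetical records; positional order is test_id, components, platform, success, duration.
results = [
    TestResult("uart.test.esp32-ard", ["uart"], "esp32-ard", True, 41.3),
    TestResult(
        "GROUPED[i2c,spi].esp32-ard", ["i2c", "spi"], "esp32-ard", False, 58.0,
        command="esphome compile ...", test_type="compile",
    ),
]
failed = [r for r in results if not r.success]   # drives the failure tables
total_time = sum(r.duration for r in results)    # drives the timing summary
```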
 def show_disk_space_if_ci(esphome_command: str) -> None:
     """Show disk space usage if running in CI during compile.

     Only shows output during compilation (not config validation) since
     disk space is only relevant when actually building firmware.

     Args:
         esphome_command: The esphome command being run (config/compile/clean)
     """
-    if os.environ.get("GITHUB_ACTIONS") and esphome_command == "compile":
-        print("\n" + "=" * 80)
-        print("Disk Space After Build:")
-        print("=" * 80)
-        subprocess.run(["df", "-h"], check=False)
-        print("=" * 80 + "\n")
+    # Only show disk space during compilation in CI
+    # Config validation doesn't build anything so disk space isn't relevant
+    if not os.environ.get("GITHUB_ACTIONS"):
+        return
+    if esphome_command != "compile":
+        return
+
+    print("\n" + "=" * 80)
+    print("Disk Space After Build:")
+    print("=" * 80)
+    # Use sys.stdout.flush() to ensure output appears immediately
+    sys.stdout.flush()
+    subprocess.run(["df", "-h"], check=False, stdout=sys.stdout, stderr=sys.stderr)
+    print("=" * 80 + "\n")
+    sys.stdout.flush()


 def find_component_tests(
-    components_dir: Path, component_pattern: str = "*"
+    components_dir: Path, component_pattern: str = "*", base_only: bool = False
 ) -> dict[str, list[Path]]:
     """Find all component test files.

     Args:
         components_dir: Path to tests/components directory
         component_pattern: Glob pattern for component names
+        base_only: If True, only find base test files (test.*.yaml), not variant files (test-*.yaml)

     Returns:
         Dictionary mapping component name to list of test files

@@ -80,8 +101,10 @@ def find_component_tests(
         if not comp_dir.is_dir():
             continue

-        for test_file in comp_dir.glob("test.*.yaml"):
-            component_tests[comp_dir.name].append(test_file)
+        # Get test files using helper function
+        test_files = get_component_test_files(comp_dir.name, all_variants=not base_only)
+        if test_files:
+            component_tests[comp_dir.name] = test_files

     return dict(component_tests)

@@ -128,6 +151,140 @@ def get_platform_base_files(base_dir: Path) -> dict[str, list[Path]]:
     return dict(platform_files)


+def group_components_by_platform(
+    failed_results: list[TestResult],
+) -> dict[tuple[str, str], list[str]]:
+    """Group failed components by platform and test type for simplified reproduction commands.
+
+    Args:
+        failed_results: List of failed test results
+
+    Returns:
+        Dictionary mapping (platform, test_type) to list of component names
+    """
+    platform_components: dict[tuple[str, str], list[str]] = {}
+    for result in failed_results:
+        key = (result.platform, result.test_type)
+        if key not in platform_components:
+            platform_components[key] = []
+        platform_components[key].extend(result.components)
+
+    # Remove duplicates and sort for each platform
+    return {
+        key: sorted(set(components)) for key, components in platform_components.items()
+    }
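A quick usage example for the helper above (hypothetical failures): collapsing per-test failures into one entry per `(platform, test_type)` pair is what later allows a single reproduction command per platform.

```python
failures = [
    TestResult("uart.test.esp32-ard", ["uart"], "esp32-ard", False, 12.0, test_type="compile"),
    TestResult("modbus.test.esp32-ard", ["modbus"], "esp32-ard", False, 9.5, test_type="compile"),
]
print(group_components_by_platform(failures))
# {('esp32-ard', 'compile'): ['modbus', 'uart']}
```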
+def format_github_summary(test_results: list[TestResult]) -> str:
+    """Format test results as GitHub Actions job summary markdown.
+
+    Args:
+        test_results: List of all test results
+
+    Returns:
+        Markdown formatted summary string
+    """
+    # Separate results into passed and failed
+    passed_results = [r for r in test_results if r.success]
+    failed_results = [r for r in test_results if not r.success]
+
+    lines = []
+
+    # Header with emoji based on success/failure
+    if failed_results:
+        lines.append("## :x: Component Tests Failed\n")
+    else:
+        lines.append("## :white_check_mark: Component Tests Passed\n")
+
+    # Summary statistics
+    total_time = sum(r.duration for r in test_results)
+    # Determine test type from results (all should be the same)
+    test_type = test_results[0].test_type if test_results else "unknown"
+    lines.append(
+        f"**Results:** {len(passed_results)} passed, {len(failed_results)} failed\n"
+    )
+    lines.append(f"**Total time:** {total_time:.1f}s\n")
+    lines.append(f"**Test type:** `{test_type}`\n")
+
+    # Show failed tests if any
+    if failed_results:
+        lines.append("### Failed Tests\n")
+        lines.append("| Test | Components | Platform | Duration |\n")
+        lines.append("|------|-----------|----------|----------|\n")
+        for result in failed_results:
+            components_str = ", ".join(result.components)
+            lines.append(
+                f"| `{result.test_id}` | {components_str} | {result.platform} | {result.duration:.1f}s |\n"
+            )
+        lines.append("\n")
+
+        # Show simplified commands to reproduce failures
+        # Group all failed components by platform for a single command per platform
+        lines.append("<details>\n")
+        lines.append("<summary>Commands to reproduce failures</summary>\n\n")
+        lines.append("```bash\n")
+
+        # Generate one command per platform and test type
+        platform_components = group_components_by_platform(failed_results)
+        for platform, test_type in sorted(platform_components.keys()):
+            components_csv = ",".join(platform_components[(platform, test_type)])
+            lines.append(
+                f"script/test_build_components.py -c {components_csv} -t {platform} -e {test_type}\n"
+            )
+
+        lines.append("```\n")
+        lines.append("</details>\n")
+
+    # Show passed tests
+    if passed_results:
+        lines.append("### Passed Tests\n\n")
+        lines.append(f"{len(passed_results)} tests passed successfully\n")
+
+        # Separate grouped and individual tests
+        grouped_results = [r for r in passed_results if len(r.components) > 1]
+        individual_results = [r for r in passed_results if len(r.components) == 1]
+
+        if grouped_results:
+            lines.append("#### Grouped Tests\n")
+            lines.append("| Components | Platform | Count | Duration |\n")
+            lines.append("|-----------|----------|-------|----------|\n")
+            for result in grouped_results:
+                components_str = ", ".join(result.components)
+                lines.append(
+                    f"| {components_str} | {result.platform} | {len(result.components)} | {result.duration:.1f}s |\n"
+                )
+            lines.append("\n")
+
+        if individual_results:
+            lines.append("#### Individual Tests\n")
+            # Show first 10 individual tests with timing
+            if len(individual_results) <= 10:
+                lines.extend(
+                    f"- `{result.test_id}` - {result.duration:.1f}s\n"
+                    for result in individual_results
+                )
+            else:
+                lines.extend(
+                    f"- `{result.test_id}` - {result.duration:.1f}s\n"
+                    for result in individual_results[:10]
+                )
+                lines.append(f"\n...and {len(individual_results) - 10} more\n")
+            lines.append("\n")
+
+    return "".join(lines)
+
+
+def write_github_summary(test_results: list[TestResult]) -> None:
+    """Write GitHub Actions job summary with test results and timing.
+
+    Args:
+        test_results: List of all test results
+    """
+    summary_content = format_github_summary(test_results)
+    with open(os.environ["GITHUB_STEP_SUMMARY"], "a", encoding="utf-8") as f:
+        f.write(summary_content)
+
+
 def extract_platform_with_version(base_file: Path) -> str:
     """Extract platform with version from base filename.

@@ -151,7 +308,7 @@ def run_esphome_test(
     esphome_command: str,
     continue_on_fail: bool,
     use_testing_mode: bool = False,
-) -> tuple[bool, str]:
+) -> TestResult:
     """Run esphome test for a single component.

     Args:

@@ -166,7 +323,7 @@ def run_esphome_test(
         use_testing_mode: Whether to use --testing-mode flag

     Returns:
-        Tuple of (success status, command string)
+        TestResult object with test details and timing
     """
     test_name = test_file.stem.split(".")[0]

@@ -221,9 +378,13 @@ def run_esphome_test(
     if use_testing_mode:
         print("  (using --testing-mode)")

+    start_time = time.time()
+    test_id = f"{component}.{test_name}.{platform_with_version}"
+
     try:
         result = subprocess.run(cmd, check=False)
         success = result.returncode == 0
+        duration = time.time() - start_time

         # Show disk space after build in CI during compile
         show_disk_space_if_ci(esphome_command)

@@ -236,12 +397,30 @@ def run_esphome_test(
             print(cmd_str)
             print()
             raise subprocess.CalledProcessError(result.returncode, cmd)
-        return success, cmd_str
+
+        return TestResult(
+            test_id=test_id,
+            components=[component],
+            platform=platform_with_version,
+            success=success,
+            duration=duration,
+            command=cmd_str,
+            test_type=esphome_command,
+        )
     except subprocess.CalledProcessError:
+        duration = time.time() - start_time
         # Re-raise if we're not continuing on fail
         if not continue_on_fail:
             raise
-        return False, cmd_str
+        return TestResult(
+            test_id=test_id,
+            components=[component],
+            platform=platform_with_version,
+            success=False,
+            duration=duration,
+            command=cmd_str,
+            test_type=esphome_command,
+        )


 def run_grouped_test(

@@ -253,7 +432,7 @@ def run_grouped_test(
     tests_dir: Path,
     esphome_command: str,
     continue_on_fail: bool,
-) -> tuple[bool, str]:
+) -> TestResult:
     """Run esphome test for a group of components with shared bus configs.

     Args:

@@ -267,7 +446,7 @@ def run_grouped_test(
         continue_on_fail: Whether to continue on failure

     Returns:
-        Tuple of (success status, command string)
+        TestResult object with test details and timing
     """
     # Create merged config
     group_name = "_".join(components[:3])  # Use first 3 components for name

@@ -294,8 +473,17 @@ def run_grouped_test(
         print(f"Error merging configs for {components}: {e}")
         if not continue_on_fail:
             raise
-        # Return empty command string since we failed before building the command
-        return False, f"# Failed during config merge: {e}"
+        # Return TestResult for merge failure
+        test_id = f"GROUPED[{','.join(components)}].{platform_with_version}"
+        return TestResult(
+            test_id=test_id,
+            components=components,
+            platform=platform_with_version,
+            success=False,
+            duration=0.0,
+            command=f"# Failed during config merge: {e}",
+            test_type=esphome_command,
+        )

     # Create test file that includes merged config
     output_file = build_dir / f"test_{group_name}.{platform_with_version}.yaml"

@@ -334,9 +522,13 @@ def run_grouped_test(
     print(f"> [GROUPED: {components_str}] [{platform_with_version}]")
     print("  (using --testing-mode)")

+    start_time = time.time()
+    test_id = f"GROUPED[{','.join(components)}].{platform_with_version}"
+
     try:
         result = subprocess.run(cmd, check=False)
         success = result.returncode == 0
+        duration = time.time() - start_time

         # Show disk space after build in CI during compile
         show_disk_space_if_ci(esphome_command)

@@ -349,12 +541,30 @@ def run_grouped_test(
             print(cmd_str)
             print()
             raise subprocess.CalledProcessError(result.returncode, cmd)
-        return success, cmd_str
+
+        return TestResult(
+            test_id=test_id,
+            components=components,
+            platform=platform_with_version,
+            success=success,
+            duration=duration,
+            command=cmd_str,
+            test_type=esphome_command,
+        )
     except subprocess.CalledProcessError:
+        duration = time.time() - start_time
         # Re-raise if we're not continuing on fail
         if not continue_on_fail:
             raise
-        return False, cmd_str
+        return TestResult(
+            test_id=test_id,
+            components=components,
+            platform=platform_with_version,
+            success=False,
+            duration=duration,
+            command=cmd_str,
+            test_type=esphome_command,
+        )


 def run_grouped_component_tests(

@@ -366,7 +576,7 @@ def run_grouped_component_tests(
     esphome_command: str,
     continue_on_fail: bool,
     additional_isolated: set[str] | None = None,
-) -> tuple[set[tuple[str, str]], list[str], list[str], dict[str, str]]:
+) -> tuple[set[tuple[str, str]], list[TestResult]]:
     """Run grouped component tests.

     Args:

@@ -380,12 +590,10 @@ def run_grouped_component_tests(
         additional_isolated: Additional components to treat as isolated (not grouped)

     Returns:
-        Tuple of (tested_components, passed_tests, failed_tests, failed_commands)
+        Tuple of (tested_components, test_results)
     """
     tested_components = set()
-    passed_tests = []
-    failed_tests = []
-    failed_commands = {}  # Map test_id to command string
+    test_results = []

     # Group components by platform and bus signature
     grouped_components: dict[tuple[str, str], list[str]] = defaultdict(list)

@@ -462,6 +670,11 @@ def run_grouped_component_tests(
         if signature:
             grouped_components[(platform, signature)].append(component)

+    # Merge groups with compatible buses (cross-bus grouping optimization)
+    # This allows mixing components with different buses (e.g., ble + uart)
+    # as long as they don't have conflicting configurations for the same bus type
+    grouped_components = merge_compatible_bus_groups(grouped_components)
+
     # Print detailed grouping plan
     print("\nGrouping Plan:")
     print("-" * 80)
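`merge_compatible_bus_groups` itself lives in `script.analyze_component_buses` and is not shown in this diff. As a rough mental model only (not the real implementation), it can be pictured as collapsing same-platform groups whose bus signatures are disjoint:

```python
# Toy model of cross-bus merging; the real function is imported, not defined here.
from collections import defaultdict


def merge_compatible_bus_groups_toy(groups):
    """Merge same-platform groups whose '+'-joined bus sets don't overlap."""
    by_platform = defaultdict(list)
    for (platform, signature), comps in groups.items():
        by_platform[platform].append((signature, comps))
    merged = {}
    for platform, entries in by_platform.items():
        buses = [set(sig.split("+")) for sig, _ in entries]
        disjoint = sum(len(b) for b in buses) == len(set().union(*buses))
        if disjoint and len(entries) > 1:
            signature = "+".join(sorted(set().union(*buses)))
            merged[(platform, signature)] = [c for _, comps in entries for c in comps]
        else:
            for signature, comps in entries:
                merged[(platform, signature)] = comps
    return merged


print(merge_compatible_bus_groups_toy({
    ("esp32-ard", "ble"): ["ble_client"],
    ("esp32-ard", "uart"): ["modbus"],
}))  # {('esp32-ard', 'ble+uart'): ['ble_client', 'modbus']}
```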
@@ -560,28 +773,6 @@ def run_grouped_component_tests(
             # No other groups for this platform - keep no_buses components together
             grouped_components[(platform, NO_BUSES_SIGNATURE)] = no_buses_comps

-    # Split groups that exceed platform-specific maximum sizes
-    # ESP8266 has limited IRAM and can't handle large component groups
-    split_groups = {}
-    for (platform, signature), components in list(grouped_components.items()):
-        max_size = PLATFORM_MAX_GROUP_SIZE.get(platform)
-        if max_size and len(components) > max_size:
-            # Split this group into smaller groups
-            print(
-                f"\n  ℹ️ Splitting {platform} group (signature: {signature}) "
-                f"from {len(components)} to max {max_size} components per group"
-            )
-            # Remove original group
-            del grouped_components[(platform, signature)]
-            # Create split groups
-            for i in range(0, len(components), max_size):
-                split_components = components[i : i + max_size]
-                # Create unique signature for each split group
-                split_signature = f"{signature}_split{i // max_size + 1}"
-                split_groups[(platform, split_signature)] = split_components
-            # Add split groups back
-            grouped_components.update(split_groups)
-
     groups_to_test = []
     individual_tests = set()  # Use set to avoid duplicates

@@ -672,7 +863,7 @@ def run_grouped_component_tests(
             continue

         # Run grouped test
-        success, cmd_str = run_grouped_test(
+        test_result = run_grouped_test(
            components=components_to_group,
            platform=platform,
            platform_with_version=platform_with_version,

@@ -687,17 +878,10 @@ def run_grouped_component_tests(
         for comp in components_to_group:
             tested_components.add((comp, platform_with_version))

-        # Record result for each component - show all components in grouped tests
-        test_id = (
-            f"GROUPED[{','.join(components_to_group)}].{platform_with_version}"
-        )
-        if success:
-            passed_tests.append(test_id)
-        else:
-            failed_tests.append(test_id)
-            failed_commands[test_id] = cmd_str
+        # Store test result
+        test_results.append(test_result)

-    return tested_components, passed_tests, failed_tests, failed_commands
+    return tested_components, test_results


 def run_individual_component_test(

@@ -710,9 +894,7 @@ def run_individual_component_test(
     esphome_command: str,
     continue_on_fail: bool,
     tested_components: set[tuple[str, str]],
-    passed_tests: list[str],
-    failed_tests: list[str],
-    failed_commands: dict[str, str],
+    test_results: list[TestResult],
 ) -> None:
     """Run an individual component test if not already tested in a group.

@@ -726,16 +908,13 @@ def run_individual_component_test(
         esphome_command: ESPHome command
         continue_on_fail: Whether to continue on failure
         tested_components: Set of already tested components
-        passed_tests: List to append passed test IDs
-        failed_tests: List to append failed test IDs
-        failed_commands: Dict to store failed test commands
+        test_results: List to append test results
     """
     # Skip if already tested in a group
     if (component, platform_with_version) in tested_components:
         return

-    test_name = test_file.stem.split(".")[0]
-    success, cmd_str = run_esphome_test(
+    test_result = run_esphome_test(
         component=component,
         test_file=test_file,
         platform=platform,

@@ -745,12 +924,7 @@ def run_individual_component_test(
         esphome_command=esphome_command,
         continue_on_fail=continue_on_fail,
     )
-    test_id = f"{component}.{test_name}.{platform_with_version}"
-    if success:
-        passed_tests.append(test_id)
-    else:
-        failed_tests.append(test_id)
-        failed_commands[test_id] = cmd_str
+    test_results.append(test_result)


 def test_components(

@@ -760,6 +934,7 @@ def test_components(
     continue_on_fail: bool,
     enable_grouping: bool = True,
     isolated_components: set[str] | None = None,
+    base_only: bool = False,
 ) -> int:
     """Test components with optional intelligent grouping.

@@ -773,6 +948,7 @@ def test_components(
             These are tested WITHOUT --testing-mode to enable full validation
             (pin conflicts, etc). This is used in CI for directly changed components
             to catch issues that would be missed with --testing-mode.
+        base_only: If True, only test base test files (test.*.yaml), not variant files (test-*.yaml)

     Returns:
         Exit code (0 for success, 1 for failure)

@@ -790,7 +966,7 @@ def test_components(
     # Find all component tests
     all_tests = {}
     for pattern in component_patterns:
-        all_tests.update(find_component_tests(tests_dir, pattern))
+        all_tests.update(find_component_tests(tests_dir, pattern, base_only))

     if not all_tests:
         print(f"No components found matching: {component_patterns}")

@@ -799,19 +975,12 @@ def test_components(
     print(f"Found {len(all_tests)} components to test")

     # Run tests
-    failed_tests = []
-    passed_tests = []
+    test_results = []
     tested_components = set()  # Track which components were tested in groups
-    failed_commands = {}  # Track commands for failed tests

     # First, run grouped tests if grouping is enabled
     if enable_grouping:
-        (
-            tested_components,
-            passed_tests,
-            failed_tests,
-            failed_commands,
-        ) = run_grouped_component_tests(
+        tested_components, grouped_results = run_grouped_component_tests(
             all_tests=all_tests,
             platform_filter=platform_filter,
             platform_bases=platform_bases,

@@ -821,6 +990,7 @@ def test_components(
             continue_on_fail=continue_on_fail,
             additional_isolated=isolated_components,
         )
+        test_results.extend(grouped_results)

     # Then run individual tests for components not in groups
     for component, test_files in sorted(all_tests.items()):

@@ -846,9 +1016,7 @@ def test_components(
                     esphome_command=esphome_command,
                     continue_on_fail=continue_on_fail,
                     tested_components=tested_components,
-                    passed_tests=passed_tests,
-                    failed_tests=failed_tests,
-                    failed_commands=failed_commands,
+                    test_results=test_results,
                 )
             else:
                 # Platform-specific test

@@ -880,31 +1048,40 @@ def test_components(
                     esphome_command=esphome_command,
                     continue_on_fail=continue_on_fail,
                     tested_components=tested_components,
-                    passed_tests=passed_tests,
-                    failed_tests=failed_tests,
-                    failed_commands=failed_commands,
+                    test_results=test_results,
                 )

+    # Separate results into passed and failed
+    passed_results = [r for r in test_results if r.success]
+    failed_results = [r for r in test_results if not r.success]
+
     # Print summary
     print("\n" + "=" * 80)
-    print(f"Test Summary: {len(passed_tests)} passed, {len(failed_tests)} failed")
+    print(f"Test Summary: {len(passed_results)} passed, {len(failed_results)} failed")
     print("=" * 80)

-    if failed_tests:
+    if failed_results:
         print("\nFailed tests:")
-        for test in failed_tests:
-            print(f"  - {test}")
+        for result in failed_results:
+            print(f"  - {result.test_id}")

-        # Print failed commands at the end for easy copy-paste from CI logs
+        # Print simplified commands grouped by platform and test type for easy copy-paste
         print("\n" + "=" * 80)
-        print("Failed test commands (copy-paste to reproduce locally):")
+        print("Commands to reproduce failures (copy-paste to reproduce locally):")
         print("=" * 80)
-        for test in failed_tests:
-            if test in failed_commands:
-                print(f"\n# {test}")
-                print(failed_commands[test])
+        platform_components = group_components_by_platform(failed_results)
+        for platform, test_type in sorted(platform_components.keys()):
+            components_csv = ",".join(platform_components[(platform, test_type)])
+            print(
+                f"script/test_build_components.py -c {components_csv} -t {platform} -e {test_type}"
+            )
         print()

+    # Write GitHub Actions job summary if in CI
+    if os.environ.get("GITHUB_STEP_SUMMARY"):
+        write_github_summary(test_results)
+
     if failed_results:
         return 1

     return 0

@@ -950,6 +1127,11 @@ def main() -> int:
         "These are tested WITHOUT --testing-mode to enable full validation. "
         "Used in CI for directly changed components to catch pin conflicts and other issues.",
     )
+    parser.add_argument(
+        "--base-only",
+        action="store_true",
+        help="Only test base test files (test.*.yaml), not variant files (test-*.yaml)",
+    )

     args = parser.parse_args()

@@ -968,6 +1150,7 @@ def main() -> int:
         continue_on_fail=args.continue_on_fail,
         enable_grouping=not args.no_grouping,
         isolated_components=isolated_components,
+        base_only=args.base_only,
     )
@@ -8,14 +8,12 @@ sensor:
     lambda: |-
       if (millis() > 10000) {
         return 0.6;
-      } else {
-        return 0.0;
       }
+      return 0.0;
   - platform: template
     id: template_temperature
     lambda: |-
       if (millis() > 10000) {
         return 42.0;
-      } else {
-        return 0.0;
       }
+      return 0.0;

@@ -1,5 +1,5 @@
 substitutions:
-  irq0_pin: GPIO13
+  irq0_pin: GPIO0
   irq1_pin: GPIO15
   reset_pin: GPIO16

@@ -4,10 +4,13 @@ sensor:
     irq_pin: ${irq_pin}
     voltage:
       name: ADE7953 Voltage
+      id: ade7953_i2c_voltage
     current_a:
       name: ADE7953 Current A
+      id: ade7953_i2c_current_a
     current_b:
       name: ADE7953 Current B
+      id: ade7953_i2c_current_b
     power_factor_a:
       name: ADE7953 Power Factor A
     power_factor_b:

@@ -4,13 +4,13 @@ sensor:
     irq_pin: ${irq_pin}
     voltage:
       name: ADE7953 Voltage
-      id: ade7953_voltage
+      id: ade7953_spi_voltage
     current_a:
       name: ADE7953 Current A
-      id: ade7953_current_a
+      id: ade7953_spi_current_a
     current_b:
       name: ADE7953 Current B
-      id: ade7953_current_b
+      id: ade7953_spi_current_b
     power_factor_a:
       name: ADE7953 Power Factor A
     power_factor_b:

@@ -5,9 +5,8 @@ sensor:
     lambda: |-
       if (millis() > 10000) {
         return 42.0;
-      } else {
-        return 0.0;
       }
+      return 0.0;
     update_interval: 15s

 binary_sensor:

@@ -1,13 +1,16 @@
 as3935_i2c:
+  id: as3935_i2c_id
   i2c_id: i2c_bus
   irq_pin: ${irq_pin}

 binary_sensor:
   - platform: as3935
+    as3935_id: as3935_i2c_id
     name: Storm Alert

 sensor:
   - platform: as3935
+    as3935_id: as3935_i2c_id
     lightning_energy:
       name: Lightning Energy
     distance:

@@ -1,13 +1,16 @@
 as3935_spi:
+  id: as3935_spi_id
   cs_pin: ${cs_pin}
   irq_pin: ${irq_pin}

 binary_sensor:
   - platform: as3935
+    as3935_id: as3935_spi_id
     name: Storm Alert

 sensor:
   - platform: as3935
+    as3935_id: as3935_spi_id
     lightning_energy:
       name: Lightning Energy
     distance:

@@ -1,7 +1,7 @@
 display:
   - platform: ssd1306_i2c
     i2c_id: i2c_bus
-    id: ssd1306_display
+    id: ssd1306_i2c_display
     model: SSD1306_128X64
     reset_pin: 19
     pages:

@@ -13,6 +13,6 @@ touchscreen:
   - platform: axs15231
     i2c_id: i2c_bus
     id: axs15231_touchscreen
-    display: ssd1306_display
+    display: ssd1306_i2c_display
     interrupt_pin: 20
     reset_pin: 18

@@ -1,7 +1,3 @@
-remote_transmitter:
-  pin: ${pin}
-  carrier_duty_percent: 50%
-
 climate:
   - platform: heatpumpir
     protocol: ballu

@@ -10,3 +6,4 @@ climate:
     name: HeatpumpIR Climate
     min_temperature: 18
     max_temperature: 30
+    transmitter_id: xmitr
Some files were not shown because too many files have changed in this diff.