Mirror of https://github.com/esphome/esphome.git (synced 2026-02-08 16:51:52 +00:00)

Compare commits: 2026.1.0b3...dev (445 commits)
(Commit list: 445 commit SHA1s between 2026.1.0b3 and dev; the author, date, and message columns were not captured in this mirror view.)
@@ -1 +1 @@
-d272a88e8ca28ae9340a9a03295a566432a52cb696501908f57764475bf7ca65
+37ec8d5a343c8d0a485fd2118cbdabcbccd7b9bca197e4a392be75087974dced
.claude/skills/pr-workflow/SKILL.md (new file, 96 lines)
@@ -0,0 +1,96 @@
---
name: pr-workflow
description: Create pull requests for esphome. Use when creating PRs, submitting changes, or preparing contributions.
allowed-tools: Read, Bash, Glob, Grep
---

# ESPHome PR Workflow

When creating a pull request for esphome, follow these steps:

## 1. Create Branch from Upstream

Always base your branch on **upstream** (not origin/fork) to ensure you have the latest code:

```bash
git fetch upstream
git checkout -b <branch-name> upstream/dev
```

## 2. Read the PR Template

Before creating a PR, read `.github/PULL_REQUEST_TEMPLATE.md` to understand required fields.

## 3. Create the PR

Use `gh pr create` with the **full template** filled in. Never skip or abbreviate sections.

Required fields:
- **What does this implement/fix?**: Brief description of changes
- **Types of changes**: Check ONE appropriate box (Bugfix, New feature, Breaking change, etc.)
- **Related issue**: Use `fixes <link>` syntax if applicable
- **Pull request in esphome-docs**: Link if docs are needed
- **Test Environment**: Check platforms you tested on
- **Example config.yaml**: Include working example YAML
- **Checklist**: Verify code is tested and tests added

## 4. Example PR Body

```markdown
# What does this implement/fix?

<describe your changes here>

## Types of changes

- [ ] Bugfix (non-breaking change which fixes an issue)
- [x] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Developer breaking change (an API change that could break external components)
- [ ] Code quality improvements to existing code or addition of tests
- [ ] Other

**Related issue or feature (if applicable):**

- fixes https://github.com/esphome/esphome/issues/XXX

**Pull request in [esphome-docs](https://github.com/esphome/esphome-docs) with documentation (if applicable):**

- esphome/esphome-docs#XXX

## Test Environment

- [x] ESP32
- [x] ESP32 IDF
- [ ] ESP8266
- [ ] RP2040
- [ ] BK72xx
- [ ] RTL87xx
- [ ] LN882x
- [ ] nRF52840

## Example entry for `config.yaml`:

```yaml
# Example config.yaml
component_name:
  id: my_component
  option: value
```

## Checklist:
- [x] The code change is tested and works locally.
- [x] Tests have been added to verify that the new code works (under `tests/` folder).

If user exposed functionality or configuration variables are added/changed:
- [ ] Documentation added/updated in [esphome-docs](https://github.com/esphome/esphome-docs).
```

## 5. Push and Create PR

```bash
git push -u origin <branch-name>
gh pr create --repo esphome/esphome --base dev --title "[component] Brief description"
```

Title should be prefixed with the component name in brackets, e.g. `[safe_mode] Add feature`.
.github/actions/restore-python/action.yml (vendored, 4 changed lines)
@@ -17,12 +17,12 @@ runs:
   steps:
     - name: Set up Python ${{ inputs.python-version }}
       id: python
-      uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
       with:
         python-version: ${{ inputs.python-version }}
     - name: Restore Python virtual environment
       id: cache-venv
-      uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+      uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
       with:
         path: venv
         # yamllint disable-line rule:line-length
.github/scripts/auto-label-pr/constants.js (vendored, new file, 38 lines)
@@ -0,0 +1,38 @@
// Constants and markers for PR auto-labeling
module.exports = {
  BOT_COMMENT_MARKER: '<!-- auto-label-pr-bot -->',
  CODEOWNERS_MARKER: '<!-- codeowners-request -->',
  TOO_BIG_MARKER: '<!-- too-big-request -->',
  DEPRECATED_COMPONENT_MARKER: '<!-- deprecated-component-request -->',

  MANAGED_LABELS: [
    'new-component',
    'new-platform',
    'new-target-platform',
    'merging-to-release',
    'merging-to-beta',
    'chained-pr',
    'core',
    'small-pr',
    'dashboard',
    'github-actions',
    'by-code-owner',
    'has-tests',
    'needs-tests',
    'needs-docs',
    'needs-codeowners',
    'too-big',
    'labeller-recheck',
    'bugfix',
    'new-feature',
    'breaking-change',
    'developer-breaking-change',
    'code-quality',
    'deprecated-component'
  ],

  DOCS_PR_PATTERNS: [
    /https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
    /esphome\/esphome-docs#\d+/
  ]
};
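For reference, a minimal sketch of how these exported patterns are meant to be consumed; `detectRequirements` in `detectors.js` applies the same `some(... .test(...))` check. The `require` path and sample PR body here are hypothetical, not part of the change itself:

```js
// Minimal sketch (hypothetical usage): a PR body counts as referencing documentation
// if any DOCS_PR_PATTERNS regex matches it.
const { DOCS_PR_PATTERNS } = require('./.github/scripts/auto-label-pr/constants');

const prBody = 'Docs PR: esphome/esphome-docs#1234';
const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));
console.log(hasDocsLink); // true for this body, false if no docs link is present
```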
.github/scripts/auto-label-pr/detectors.js (vendored, new file, 373 lines)
@@ -0,0 +1,373 @@
const fs = require('fs');
const { DOCS_PR_PATTERNS } = require('./constants');

// Strategy: Merge branch detection
async function detectMergeBranch(context) {
  const labels = new Set();
  const baseRef = context.payload.pull_request.base.ref;

  if (baseRef === 'release') {
    labels.add('merging-to-release');
  } else if (baseRef === 'beta') {
    labels.add('merging-to-beta');
  } else if (baseRef !== 'dev') {
    labels.add('chained-pr');
  }

  return labels;
}

// Strategy: Component and platform labeling
async function detectComponentPlatforms(changedFiles, apiData) {
  const labels = new Set();
  const componentRegex = /^esphome\/components\/([^\/]+)\//;
  const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);

  for (const file of changedFiles) {
    const componentMatch = file.match(componentRegex);
    if (componentMatch) {
      labels.add(`component: ${componentMatch[1]}`);
    }

    const platformMatch = file.match(targetPlatformRegex);
    if (platformMatch) {
      labels.add(`platform: ${platformMatch[1]}`);
    }
  }

  return labels;
}

// Strategy: New component detection
async function detectNewComponents(prFiles) {
  const labels = new Set();
  const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);

  for (const file of addedFiles) {
    const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
    if (componentMatch) {
      try {
        const content = fs.readFileSync(file, 'utf8');
        if (content.includes('IS_TARGET_PLATFORM = True')) {
          labels.add('new-target-platform');
        }
      } catch (error) {
        console.log(`Failed to read content of ${file}:`, error.message);
      }
      labels.add('new-component');
    }
  }

  return labels;
}

// Strategy: New platform detection
async function detectNewPlatforms(prFiles, apiData) {
  const labels = new Set();
  const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);

  for (const file of addedFiles) {
    const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
    if (platformFileMatch) {
      const [, component, platform] = platformFileMatch;
      if (apiData.platformComponents.includes(platform)) {
        labels.add('new-platform');
      }
    }

    const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
    if (platformDirMatch) {
      const [, component, platform] = platformDirMatch;
      if (apiData.platformComponents.includes(platform)) {
        labels.add('new-platform');
      }
    }
  }

  return labels;
}

// Strategy: Core files detection
async function detectCoreChanges(changedFiles) {
  const labels = new Set();
  const coreFiles = changedFiles.filter(file =>
    file.startsWith('esphome/core/') ||
    (file.startsWith('esphome/') && file.split('/').length === 2)
  );

  if (coreFiles.length > 0) {
    labels.add('core');
  }

  return labels;
}

// Strategy: PR size detection
async function detectPRSize(prFiles, totalAdditions, totalDeletions, totalChanges, isMegaPR, SMALL_PR_THRESHOLD, TOO_BIG_THRESHOLD) {
  const labels = new Set();

  if (totalChanges <= SMALL_PR_THRESHOLD) {
    labels.add('small-pr');
    return labels;
  }

  const testAdditions = prFiles
    .filter(file => file.filename.startsWith('tests/'))
    .reduce((sum, file) => sum + (file.additions || 0), 0);
  const testDeletions = prFiles
    .filter(file => file.filename.startsWith('tests/'))
    .reduce((sum, file) => sum + (file.deletions || 0), 0);

  const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);

  // Don't add too-big if mega-pr label is already present
  if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
    labels.add('too-big');
  }

  return labels;
}

// Strategy: Dashboard changes
async function detectDashboardChanges(changedFiles) {
  const labels = new Set();
  const dashboardFiles = changedFiles.filter(file =>
    file.startsWith('esphome/dashboard/') ||
    file.startsWith('esphome/components/dashboard_import/')
  );

  if (dashboardFiles.length > 0) {
    labels.add('dashboard');
  }

  return labels;
}

// Strategy: GitHub Actions changes
async function detectGitHubActionsChanges(changedFiles) {
  const labels = new Set();
  const githubActionsFiles = changedFiles.filter(file =>
    file.startsWith('.github/workflows/')
  );

  if (githubActionsFiles.length > 0) {
    labels.add('github-actions');
  }

  return labels;
}

// Strategy: Code owner detection
async function detectCodeOwner(github, context, changedFiles) {
  const labels = new Set();
  const { owner, repo } = context.repo;

  try {
    const { data: codeownersFile } = await github.rest.repos.getContent({
      owner,
      repo,
      path: 'CODEOWNERS',
    });

    const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
    const prAuthor = context.payload.pull_request.user.login;

    const codeownersLines = codeownersContent.split('\n')
      .map(line => line.trim())
      .filter(line => line && !line.startsWith('#'));

    const codeownersRegexes = codeownersLines.map(line => {
      const parts = line.split(/\s+/);
      const pattern = parts[0];
      const owners = parts.slice(1);

      let regex;
      if (pattern.endsWith('*')) {
        const dir = pattern.slice(0, -1);
        regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
      } else if (pattern.includes('*')) {
        // First escape all regex special chars except *, then replace * with .*
        const regexPattern = pattern
          .replace(/[.+?^${}()|[\]\\]/g, '\\$&')
          .replace(/\*/g, '.*');
        regex = new RegExp(`^${regexPattern}$`);
      } else {
        regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
      }

      return { regex, owners };
    });

    for (const file of changedFiles) {
      for (const { regex, owners } of codeownersRegexes) {
        if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
          labels.add('by-code-owner');
          return labels;
        }
      }
    }
  } catch (error) {
    console.log('Failed to read or parse CODEOWNERS file:', error.message);
  }

  return labels;
}

// Strategy: Test detection
async function detectTests(changedFiles) {
  const labels = new Set();
  const testFiles = changedFiles.filter(file => file.startsWith('tests/'));

  if (testFiles.length > 0) {
    labels.add('has-tests');
  }

  return labels;
}

// Strategy: PR Template Checkbox detection
async function detectPRTemplateCheckboxes(context) {
  const labels = new Set();
  const prBody = context.payload.pull_request.body || '';

  console.log('Checking PR template checkboxes...');

  // Check for checked checkboxes in the "Types of changes" section
  const checkboxPatterns = [
    { pattern: /- \[x\] Bugfix \(non-breaking change which fixes an issue\)/i, label: 'bugfix' },
    { pattern: /- \[x\] New feature \(non-breaking change which adds functionality\)/i, label: 'new-feature' },
    { pattern: /- \[x\] Breaking change \(fix or feature that would cause existing functionality to not work as expected\)/i, label: 'breaking-change' },
    { pattern: /- \[x\] Developer breaking change \(an API change that could break external components\)/i, label: 'developer-breaking-change' },
    { pattern: /- \[x\] Code quality improvements to existing code or addition of tests/i, label: 'code-quality' }
  ];

  for (const { pattern, label } of checkboxPatterns) {
    if (pattern.test(prBody)) {
      console.log(`Found checked checkbox for: ${label}`);
      labels.add(label);
    }
  }

  return labels;
}

// Strategy: Deprecated component detection
async function detectDeprecatedComponents(github, context, changedFiles) {
  const labels = new Set();
  const deprecatedInfo = [];
  const { owner, repo } = context.repo;

  // Compile regex once for better performance
  const componentFileRegex = /^esphome\/components\/([^\/]+)\//;

  // Get files that are modified or added in components directory
  const componentFiles = changedFiles.filter(file => componentFileRegex.test(file));

  if (componentFiles.length === 0) {
    return { labels, deprecatedInfo };
  }

  // Extract unique component names using the same regex
  const components = new Set();
  for (const file of componentFiles) {
    const match = file.match(componentFileRegex);
    if (match) {
      components.add(match[1]);
    }
  }

  // Get PR head to fetch files from the PR branch
  const prNumber = context.payload.pull_request.number;

  // Check each component's __init__.py for DEPRECATED_COMPONENT constant
  for (const component of components) {
    const initFile = `esphome/components/${component}/__init__.py`;
    try {
      // Fetch file content from PR head using GitHub API
      const { data: fileData } = await github.rest.repos.getContent({
        owner,
        repo,
        path: initFile,
        ref: `refs/pull/${prNumber}/head`
      });

      // Decode base64 content
      const content = Buffer.from(fileData.content, 'base64').toString('utf8');

      // Look for DEPRECATED_COMPONENT = "message" or DEPRECATED_COMPONENT = 'message'
      // Support single quotes, double quotes, and triple quotes (for multiline)
      const doubleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*"""([\s\S]*?)"""/s) ||
        content.match(/DEPRECATED_COMPONENT\s*=\s*"((?:[^"\\]|\\.)*)"/);
      const singleQuoteMatch = content.match(/DEPRECATED_COMPONENT\s*=\s*'''([\s\S]*?)'''/s) ||
        content.match(/DEPRECATED_COMPONENT\s*=\s*'((?:[^'\\]|\\.)*)'/);
      const deprecatedMatch = doubleQuoteMatch || singleQuoteMatch;

      if (deprecatedMatch) {
        labels.add('deprecated-component');
        deprecatedInfo.push({
          component: component,
          message: deprecatedMatch[1].trim()
        });
        console.log(`Found deprecated component: ${component}`);
      }
    } catch (error) {
      // Only log if it's not a simple "file not found" error (404)
      if (error.status !== 404) {
        console.log(`Error reading ${initFile}:`, error.message);
      }
    }
  }

  return { labels, deprecatedInfo };
}

// Strategy: Requirements detection
async function detectRequirements(allLabels, prFiles, context) {
  const labels = new Set();

  // Check for missing tests
  if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
    labels.add('needs-tests');
  }

  // Check for missing docs
  if (allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) {
    const prBody = context.payload.pull_request.body || '';
    const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));

    if (!hasDocsLink) {
      labels.add('needs-docs');
    }
  }

  // Check for missing CODEOWNERS
  if (allLabels.has('new-component')) {
    const codeownersModified = prFiles.some(file =>
      file.filename === 'CODEOWNERS' &&
      (file.status === 'modified' || file.status === 'added') &&
      (file.additions || 0) > 0
    );

    if (!codeownersModified) {
      labels.add('needs-codeowners');
    }
  }

  return labels;
}

module.exports = {
  detectMergeBranch,
  detectComponentPlatforms,
  detectNewComponents,
  detectNewPlatforms,
  detectCoreChanges,
  detectPRSize,
  detectDashboardChanges,
  detectGitHubActionsChanges,
  detectCodeOwner,
  detectTests,
  detectPRTemplateCheckboxes,
  detectDeprecatedComponents,
  detectRequirements
};
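The detector functions are plain async functions over the GitHub Actions `context` object, so they can be exercised outside the workflow. A minimal sketch (the fake `context` values and the `require` path are hypothetical):

```js
// Minimal sketch (hypothetical, for local experimentation): detectMergeBranch only
// inspects context.payload.pull_request.base.ref, so a hand-built context suffices.
const { detectMergeBranch } = require('./.github/scripts/auto-label-pr/detectors');

const fakeContext = {
  payload: { pull_request: { base: { ref: 'beta' } } },
};

detectMergeBranch(fakeContext).then(labels => {
  console.log([...labels]); // [ 'merging-to-beta' ]
});
```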
.github/scripts/auto-label-pr/index.js (vendored, new file, 187 lines)
@@ -0,0 +1,187 @@
const { MANAGED_LABELS } = require('./constants');
const {
  detectMergeBranch,
  detectComponentPlatforms,
  detectNewComponents,
  detectNewPlatforms,
  detectCoreChanges,
  detectPRSize,
  detectDashboardChanges,
  detectGitHubActionsChanges,
  detectCodeOwner,
  detectTests,
  detectPRTemplateCheckboxes,
  detectDeprecatedComponents,
  detectRequirements
} = require('./detectors');
const { handleReviews } = require('./reviews');
const { applyLabels, removeOldLabels } = require('./labels');

// Fetch API data
async function fetchApiData() {
  try {
    const response = await fetch('https://data.esphome.io/components.json');
    const componentsData = await response.json();
    return {
      targetPlatforms: componentsData.target_platforms || [],
      platformComponents: componentsData.platform_components || []
    };
  } catch (error) {
    console.log('Failed to fetch components data from API:', error.message);
    return { targetPlatforms: [], platformComponents: [] };
  }
}

module.exports = async ({ github, context }) => {
  // Environment variables
  const SMALL_PR_THRESHOLD = parseInt(process.env.SMALL_PR_THRESHOLD);
  const MAX_LABELS = parseInt(process.env.MAX_LABELS);
  const TOO_BIG_THRESHOLD = parseInt(process.env.TOO_BIG_THRESHOLD);
  const COMPONENT_LABEL_THRESHOLD = parseInt(process.env.COMPONENT_LABEL_THRESHOLD);

  // Global state
  const { owner, repo } = context.repo;
  const pr_number = context.issue.number;

  // Get current labels and PR data
  const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
    owner,
    repo,
    issue_number: pr_number
  });
  const currentLabels = currentLabelsData.map(label => label.name);
  const managedLabels = currentLabels.filter(label =>
    label.startsWith('component: ') || MANAGED_LABELS.includes(label)
  );

  // Check for mega-PR early - if present, skip most automatic labeling
  const isMegaPR = currentLabels.includes('mega-pr');

  // Get all PR files with automatic pagination
  const prFiles = await github.paginate(
    github.rest.pulls.listFiles,
    {
      owner,
      repo,
      pull_number: pr_number
    }
  );

  // Calculate data from PR files
  const changedFiles = prFiles.map(file => file.filename);
  const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
  const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
  const totalChanges = totalAdditions + totalDeletions;

  console.log('Current labels:', currentLabels.join(', '));
  console.log('Changed files:', changedFiles.length);
  console.log('Total changes:', totalChanges);
  if (isMegaPR) {
    console.log('Mega-PR detected - applying limited labeling logic');
  }

  // Fetch API data
  const apiData = await fetchApiData();
  const baseRef = context.payload.pull_request.base.ref;

  // Early exit for release and beta branches only
  if (baseRef === 'release' || baseRef === 'beta') {
    const branchLabels = await detectMergeBranch(context);
    const finalLabels = Array.from(branchLabels);

    console.log('Computed labels (merge branch only):', finalLabels.join(', '));

    // Apply labels
    await applyLabels(github, context, finalLabels);

    // Remove old managed labels
    await removeOldLabels(github, context, managedLabels, finalLabels);

    return;
  }

  // Run all strategies
  const [
    branchLabels,
    componentLabels,
    newComponentLabels,
    newPlatformLabels,
    coreLabels,
    sizeLabels,
    dashboardLabels,
    actionsLabels,
    codeOwnerLabels,
    testLabels,
    checkboxLabels,
    deprecatedResult
  ] = await Promise.all([
    detectMergeBranch(context),
    detectComponentPlatforms(changedFiles, apiData),
    detectNewComponents(prFiles),
    detectNewPlatforms(prFiles, apiData),
    detectCoreChanges(changedFiles),
    detectPRSize(prFiles, totalAdditions, totalDeletions, totalChanges, isMegaPR, SMALL_PR_THRESHOLD, TOO_BIG_THRESHOLD),
    detectDashboardChanges(changedFiles),
    detectGitHubActionsChanges(changedFiles),
    detectCodeOwner(github, context, changedFiles),
    detectTests(changedFiles),
    detectPRTemplateCheckboxes(context),
    detectDeprecatedComponents(github, context, changedFiles)
  ]);

  // Extract deprecated component info
  const deprecatedLabels = deprecatedResult.labels;
  const deprecatedInfo = deprecatedResult.deprecatedInfo;

  // Combine all labels
  const allLabels = new Set([
    ...branchLabels,
    ...componentLabels,
    ...newComponentLabels,
    ...newPlatformLabels,
    ...coreLabels,
    ...sizeLabels,
    ...dashboardLabels,
    ...actionsLabels,
    ...codeOwnerLabels,
    ...testLabels,
    ...checkboxLabels,
    ...deprecatedLabels
  ]);

  // Detect requirements based on all other labels
  const requirementLabels = await detectRequirements(allLabels, prFiles, context);
  for (const label of requirementLabels) {
    allLabels.add(label);
  }

  let finalLabels = Array.from(allLabels);

  // For mega-PRs, exclude component labels if there are too many
  if (isMegaPR) {
    const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
    if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
      finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
      console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
    }
  }

  // Handle too many labels (only for non-mega PRs)
  const tooManyLabels = finalLabels.length > MAX_LABELS;
  const originalLabelCount = finalLabels.length;

  if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
    finalLabels = ['too-big'];
  }

  console.log('Computed labels:', finalLabels.join(', '));

  // Handle reviews
  await handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD);

  // Apply labels
  await applyLabels(github, context, finalLabels);

  // Remove old managed labels
  await removeOldLabels(github, context, managedLabels, finalLabels);
};
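The entry point reads its thresholds from environment variables (the workflow's `env` block is expected to supply them) and receives the `{ github, context }` pair that `actions/github-script` injects. A hedged sketch of that calling contract; the numeric values below are made-up placeholders, not the repository's real settings:

```js
// Hypothetical sketch of the calling contract; threshold values are placeholders.
process.env.SMALL_PR_THRESHOLD = '30';
process.env.MAX_LABELS = '15';
process.env.TOO_BIG_THRESHOLD = '1000';
process.env.COMPONENT_LABEL_THRESHOLD = '5';

const run = require('./.github/scripts/auto-label-pr/index.js');
// `run` is the async ({ github, context }) => ... exported above; actions/github-script
// passes an authenticated Octokit client as `github` and the event payload as `context`:
// await run({ github, context });
```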
.github/scripts/auto-label-pr/labels.js (vendored, new file, 41 lines)
@@ -0,0 +1,41 @@
// Apply labels to PR
async function applyLabels(github, context, finalLabels) {
  const { owner, repo } = context.repo;
  const pr_number = context.issue.number;

  if (finalLabels.length > 0) {
    console.log(`Adding labels: ${finalLabels.join(', ')}`);
    await github.rest.issues.addLabels({
      owner,
      repo,
      issue_number: pr_number,
      labels: finalLabels
    });
  }
}

// Remove old managed labels
async function removeOldLabels(github, context, managedLabels, finalLabels) {
  const { owner, repo } = context.repo;
  const pr_number = context.issue.number;

  const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
  for (const label of labelsToRemove) {
    console.log(`Removing label: ${label}`);
    try {
      await github.rest.issues.removeLabel({
        owner,
        repo,
        issue_number: pr_number,
        name: label
      });
    } catch (error) {
      console.log(`Failed to remove label ${label}:`, error.message);
    }
  }
}

module.exports = {
  applyLabels,
  removeOldLabels
};
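Because `applyLabels` and `removeOldLabels` only touch `github.rest.issues`, they can be dry-run with a hand-rolled stub. A minimal sketch (the stub, path, and label values are hypothetical):

```js
// Minimal sketch (hypothetical stub): log what would be added instead of calling GitHub.
const { applyLabels } = require('./.github/scripts/auto-label-pr/labels');

const fakeGithub = {
  rest: {
    issues: {
      addLabels: async ({ issue_number, labels }) =>
        console.log(`would add to #${issue_number}:`, labels),
    },
  },
};
const fakeContext = { repo: { owner: 'esphome', repo: 'esphome' }, issue: { number: 1 } };

applyLabels(fakeGithub, fakeContext, ['component: safe_mode', 'small-pr']);
```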
.github/scripts/auto-label-pr/reviews.js (vendored, new file, 141 lines)
@@ -0,0 +1,141 @@
const {
  BOT_COMMENT_MARKER,
  CODEOWNERS_MARKER,
  TOO_BIG_MARKER,
  DEPRECATED_COMPONENT_MARKER
} = require('./constants');

// Generate review messages
function generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD) {
  const messages = [];

  // Deprecated component message
  if (finalLabels.includes('deprecated-component') && deprecatedInfo && deprecatedInfo.length > 0) {
    let message = `${DEPRECATED_COMPONENT_MARKER}\n### ⚠️ Deprecated Component\n\n`;
    message += `Hey there @${prAuthor},\n`;
    message += `This PR modifies one or more deprecated components. Please be aware:\n\n`;

    for (const info of deprecatedInfo) {
      message += `#### Component: \`${info.component}\`\n`;
      message += `${info.message}\n\n`;
    }

    message += `Consider migrating to the recommended alternative if applicable.`;

    messages.push(message);
  }

  // Too big message
  if (finalLabels.includes('too-big')) {
    const testAdditions = prFiles
      .filter(file => file.filename.startsWith('tests/'))
      .reduce((sum, file) => sum + (file.additions || 0), 0);
    const testDeletions = prFiles
      .filter(file => file.filename.startsWith('tests/'))
      .reduce((sum, file) => sum + (file.deletions || 0), 0);
    const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);

    const tooManyLabels = originalLabelCount > MAX_LABELS;
    const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;

    let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;

    if (tooManyLabels && tooManyChanges) {
      message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
    } else if (tooManyLabels) {
      message += `This PR affects ${originalLabelCount} different components/areas.`;
    } else {
      message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
    }

    message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
    message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;

    messages.push(message);
  }

  // CODEOWNERS message
  if (finalLabels.includes('needs-codeowners')) {
    const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
      `Hey there @${prAuthor},\n` +
      `Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
      `This way we can notify you if a bug report for this integration is reported.\n\n` +
      `In \`__init__.py\` of the integration, please add:\n\n` +
      `\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
      `And run \`script/build_codeowners.py\``;

    messages.push(message);
  }

  return messages;
}

// Handle reviews
async function handleReviews(github, context, finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, MAX_LABELS, TOO_BIG_THRESHOLD) {
  const { owner, repo } = context.repo;
  const pr_number = context.issue.number;
  const prAuthor = context.payload.pull_request.user.login;

  const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount, deprecatedInfo, prFiles, totalAdditions, totalDeletions, prAuthor, MAX_LABELS, TOO_BIG_THRESHOLD);
  const hasReviewableLabels = finalLabels.some(label =>
    ['too-big', 'needs-codeowners', 'deprecated-component'].includes(label)
  );

  const { data: reviews } = await github.rest.pulls.listReviews({
    owner,
    repo,
    pull_number: pr_number
  });

  const botReviews = reviews.filter(review =>
    review.user.type === 'Bot' &&
    review.state === 'CHANGES_REQUESTED' &&
    review.body && review.body.includes(BOT_COMMENT_MARKER)
  );

  if (hasReviewableLabels) {
    const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;

    if (botReviews.length > 0) {
      // Update existing review
      await github.rest.pulls.updateReview({
        owner,
        repo,
        pull_number: pr_number,
        review_id: botReviews[0].id,
        body: reviewBody
      });
      console.log('Updated existing bot review');
    } else {
      // Create new review
      await github.rest.pulls.createReview({
        owner,
        repo,
        pull_number: pr_number,
        body: reviewBody,
        event: 'REQUEST_CHANGES'
      });
      console.log('Created new bot review');
    }
  } else if (botReviews.length > 0) {
    // Dismiss existing reviews
    for (const review of botReviews) {
      try {
        await github.rest.pulls.dismissReview({
          owner,
          repo,
          pull_number: pr_number,
          review_id: review.id,
          message: 'Review dismissed: All requirements have been met'
        });
        console.log(`Dismissed bot review ${review.id}`);
      } catch (error) {
        console.log(`Failed to dismiss review ${review.id}:`, error.message);
      }
    }
  }
}

module.exports = {
  handleReviews
};
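`handleReviews` keys everything on `BOT_COMMENT_MARKER`: it lists existing reviews, creates or updates a `REQUEST_CHANGES` review while any blocking label is present, and dismisses the stale bot review once none remain. A minimal sketch with stubbed API calls (the stub shapes and argument values are hypothetical):

```js
// Minimal sketch (hypothetical stubs): a PR carrying `needs-codeowners` gets a
// REQUEST_CHANGES review containing the code-ownership message.
const { handleReviews } = require('./.github/scripts/auto-label-pr/reviews');

const fakeGithub = {
  rest: {
    pulls: {
      listReviews: async () => ({ data: [] }), // no existing bot review
      createReview: async ({ body }) => console.log('would request changes:\n', body),
      updateReview: async () => {},
      dismissReview: async () => {},
    },
  },
};
const fakeContext = {
  repo: { owner: 'esphome', repo: 'esphome' },
  issue: { number: 1 },
  payload: { pull_request: { user: { login: 'some-user' } } },
};

// Args: finalLabels, originalLabelCount, deprecatedInfo, prFiles, additions, deletions,
//       MAX_LABELS, TOO_BIG_THRESHOLD (placeholder values).
handleReviews(fakeGithub, fakeContext, ['needs-codeowners'], 1, [], [], 0, 0, 15, 1000);
```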
.github/workflows/auto-label-pr.yml (vendored, 634 changed lines)
@@ -22,7 +22,7 @@ jobs:
     if: github.event.action != 'labeled' || github.event.sender.type != 'Bot'
     steps:
       - name: Checkout
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

       - name: Generate a token
         id: generate-token
@@ -36,633 +36,5 @@ jobs:
         with:
           github-token: ${{ steps.generate-token.outputs.token }}
           script: |
-            const fs = require('fs');
+            const script = require('./.github/scripts/auto-label-pr/index.js');
+            await script({ github, context });

The remaining ~630 removed lines of this hunk were the previous inline copy of the auto-labeling script (constants, detector strategies, and label/review handling), which this change extracts into the .github/scripts/auto-label-pr/ modules shown above.
// Constants
|
|
||||||
const SMALL_PR_THRESHOLD = parseInt('${{ env.SMALL_PR_THRESHOLD }}');
|
|
||||||
const MAX_LABELS = parseInt('${{ env.MAX_LABELS }}');
|
|
||||||
const TOO_BIG_THRESHOLD = parseInt('${{ env.TOO_BIG_THRESHOLD }}');
|
|
||||||
const COMPONENT_LABEL_THRESHOLD = parseInt('${{ env.COMPONENT_LABEL_THRESHOLD }}');
|
|
||||||
const BOT_COMMENT_MARKER = '<!-- auto-label-pr-bot -->';
|
|
||||||
const CODEOWNERS_MARKER = '<!-- codeowners-request -->';
|
|
||||||
const TOO_BIG_MARKER = '<!-- too-big-request -->';
|
|
||||||
|
|
||||||
const MANAGED_LABELS = [
|
|
||||||
'new-component',
|
|
||||||
'new-platform',
|
|
||||||
'new-target-platform',
|
|
||||||
'merging-to-release',
|
|
||||||
'merging-to-beta',
|
|
||||||
'chained-pr',
|
|
||||||
'core',
|
|
||||||
'small-pr',
|
|
||||||
'dashboard',
|
|
||||||
'github-actions',
|
|
||||||
'by-code-owner',
|
|
||||||
'has-tests',
|
|
||||||
'needs-tests',
|
|
||||||
'needs-docs',
|
|
||||||
'needs-codeowners',
|
|
||||||
'too-big',
|
|
||||||
'labeller-recheck',
|
|
||||||
'bugfix',
|
|
||||||
'new-feature',
|
|
||||||
'breaking-change',
|
|
||||||
'developer-breaking-change',
|
|
||||||
'code-quality'
|
|
||||||
];
|
|
||||||
|
|
||||||
const DOCS_PR_PATTERNS = [
|
|
||||||
/https:\/\/github\.com\/esphome\/esphome-docs\/pull\/\d+/,
|
|
||||||
/esphome\/esphome-docs#\d+/
|
|
||||||
];
|
|
||||||
|
|
||||||
// Global state
|
|
||||||
const { owner, repo } = context.repo;
|
|
||||||
const pr_number = context.issue.number;
|
|
||||||
|
|
||||||
// Get current labels and PR data
|
|
||||||
const { data: currentLabelsData } = await github.rest.issues.listLabelsOnIssue({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
issue_number: pr_number
|
|
||||||
});
|
|
||||||
const currentLabels = currentLabelsData.map(label => label.name);
|
|
||||||
const managedLabels = currentLabels.filter(label =>
|
|
||||||
label.startsWith('component: ') || MANAGED_LABELS.includes(label)
|
|
||||||
);
|
|
||||||
|
|
||||||
// Check for mega-PR early - if present, skip most automatic labeling
|
|
||||||
const isMegaPR = currentLabels.includes('mega-pr');
|
|
||||||
|
|
||||||
// Get all PR files with automatic pagination
|
|
||||||
const prFiles = await github.paginate(
|
|
||||||
github.rest.pulls.listFiles,
|
|
||||||
{
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
pull_number: pr_number
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
// Calculate data from PR files
|
|
||||||
const changedFiles = prFiles.map(file => file.filename);
|
|
||||||
const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
|
|
||||||
const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
|
|
||||||
const totalChanges = totalAdditions + totalDeletions;
|
|
||||||
|
|
||||||
console.log('Current labels:', currentLabels.join(', '));
|
|
||||||
console.log('Changed files:', changedFiles.length);
|
|
||||||
console.log('Total changes:', totalChanges);
|
|
||||||
if (isMegaPR) {
|
|
||||||
console.log('Mega-PR detected - applying limited labeling logic');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fetch API data
|
|
||||||
async function fetchApiData() {
|
|
||||||
try {
|
|
||||||
const response = await fetch('https://data.esphome.io/components.json');
|
|
||||||
const componentsData = await response.json();
|
|
||||||
return {
|
|
||||||
targetPlatforms: componentsData.target_platforms || [],
|
|
||||||
platformComponents: componentsData.platform_components || []
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
console.log('Failed to fetch components data from API:', error.message);
|
|
||||||
return { targetPlatforms: [], platformComponents: [] };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Strategy: Merge branch detection
|
|
||||||
async function detectMergeBranch() {
|
|
||||||
const labels = new Set();
|
|
||||||
const baseRef = context.payload.pull_request.base.ref;
|
|
||||||
|
|
||||||
if (baseRef === 'release') {
|
|
||||||
labels.add('merging-to-release');
|
|
||||||
} else if (baseRef === 'beta') {
|
|
||||||
labels.add('merging-to-beta');
|
|
||||||
} else if (baseRef !== 'dev') {
|
|
||||||
labels.add('chained-pr');
|
|
||||||
}
|
|
||||||
|
|
||||||
return labels;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Strategy: Component and platform labeling
|
|
||||||
async function detectComponentPlatforms(apiData) {
|
|
||||||
const labels = new Set();
|
|
||||||
const componentRegex = /^esphome\/components\/([^\/]+)\//;
|
|
||||||
const targetPlatformRegex = new RegExp(`^esphome\/components\/(${apiData.targetPlatforms.join('|')})/`);
|
|
||||||
|
|
||||||
for (const file of changedFiles) {
|
|
||||||
const componentMatch = file.match(componentRegex);
|
|
||||||
if (componentMatch) {
|
|
||||||
labels.add(`component: ${componentMatch[1]}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const platformMatch = file.match(targetPlatformRegex);
|
|
||||||
if (platformMatch) {
|
|
||||||
labels.add(`platform: ${platformMatch[1]}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return labels;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Strategy: New component detection
|
|
||||||
async function detectNewComponents() {
|
|
||||||
const labels = new Set();
|
|
||||||
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
|
|
||||||
|
|
||||||
for (const file of addedFiles) {
|
|
||||||
const componentMatch = file.match(/^esphome\/components\/([^\/]+)\/__init__\.py$/);
|
|
||||||
if (componentMatch) {
|
|
||||||
try {
|
|
||||||
const content = fs.readFileSync(file, 'utf8');
|
|
||||||
if (content.includes('IS_TARGET_PLATFORM = True')) {
|
|
||||||
labels.add('new-target-platform');
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
console.log(`Failed to read content of ${file}:`, error.message);
|
|
||||||
}
|
|
||||||
labels.add('new-component');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return labels;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Strategy: New platform detection
|
|
||||||
async function detectNewPlatforms(apiData) {
|
|
||||||
const labels = new Set();
|
|
||||||
const addedFiles = prFiles.filter(file => file.status === 'added').map(file => file.filename);
|
|
||||||
|
|
||||||
for (const file of addedFiles) {
|
|
||||||
const platformFileMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\.py$/);
|
|
||||||
if (platformFileMatch) {
|
|
||||||
const [, component, platform] = platformFileMatch;
|
|
||||||
if (apiData.platformComponents.includes(platform)) {
|
|
||||||
labels.add('new-platform');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const platformDirMatch = file.match(/^esphome\/components\/([^\/]+)\/([^\/]+)\/__init__\.py$/);
|
|
||||||
if (platformDirMatch) {
|
|
||||||
const [, component, platform] = platformDirMatch;
|
|
||||||
if (apiData.platformComponents.includes(platform)) {
|
|
||||||
labels.add('new-platform');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return labels;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Strategy: Core files detection
|
|
||||||
async function detectCoreChanges() {
|
|
||||||
const labels = new Set();
|
|
||||||
const coreFiles = changedFiles.filter(file =>
|
|
||||||
file.startsWith('esphome/core/') ||
|
|
||||||
(file.startsWith('esphome/') && file.split('/').length === 2)
|
|
||||||
);
|
|
||||||
|
|
||||||
if (coreFiles.length > 0) {
|
|
||||||
labels.add('core');
|
|
||||||
}
|
|
||||||
|
|
||||||
return labels;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Strategy: PR size detection
|
|
||||||
async function detectPRSize() {
|
|
||||||
const labels = new Set();
|
|
||||||
|
|
||||||
if (totalChanges <= SMALL_PR_THRESHOLD) {
|
|
||||||
labels.add('small-pr');
|
|
||||||
return labels;
|
|
||||||
}
|
|
||||||
|
|
||||||
const testAdditions = prFiles
|
|
||||||
.filter(file => file.filename.startsWith('tests/'))
|
|
||||||
.reduce((sum, file) => sum + (file.additions || 0), 0);
|
|
||||||
const testDeletions = prFiles
|
|
||||||
.filter(file => file.filename.startsWith('tests/'))
|
|
||||||
.reduce((sum, file) => sum + (file.deletions || 0), 0);
|
|
||||||
|
|
||||||
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
|
|
||||||
|
|
||||||
// Don't add too-big if mega-pr label is already present
|
|
||||||
if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
|
|
||||||
labels.add('too-big');
|
|
||||||
}
|
|
||||||
|
|
||||||
return labels;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Strategy: Dashboard changes
|
|
||||||
async function detectDashboardChanges() {
|
|
||||||
const labels = new Set();
|
|
||||||
const dashboardFiles = changedFiles.filter(file =>
|
|
||||||
file.startsWith('esphome/dashboard/') ||
|
|
||||||
file.startsWith('esphome/components/dashboard_import/')
|
|
||||||
);
|
|
||||||
|
|
||||||
if (dashboardFiles.length > 0) {
|
|
||||||
labels.add('dashboard');
|
|
||||||
}
|
|
||||||
|
|
||||||
return labels;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Strategy: GitHub Actions changes
|
|
||||||
async function detectGitHubActionsChanges() {
|
|
||||||
const labels = new Set();
|
|
||||||
const githubActionsFiles = changedFiles.filter(file =>
|
|
||||||
file.startsWith('.github/workflows/')
|
|
||||||
);
|
|
||||||
|
|
||||||
if (githubActionsFiles.length > 0) {
|
|
||||||
labels.add('github-actions');
|
|
||||||
}
|
|
||||||
|
|
||||||
return labels;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Strategy: Code owner detection
async function detectCodeOwner() {
  const labels = new Set();

  try {
    const { data: codeownersFile } = await github.rest.repos.getContent({
      owner,
      repo,
      path: 'CODEOWNERS',
    });

    const codeownersContent = Buffer.from(codeownersFile.content, 'base64').toString('utf8');
    const prAuthor = context.payload.pull_request.user.login;

    const codeownersLines = codeownersContent.split('\n')
      .map(line => line.trim())
      .filter(line => line && !line.startsWith('#'));

    const codeownersRegexes = codeownersLines.map(line => {
      const parts = line.split(/\s+/);
      const pattern = parts[0];
      const owners = parts.slice(1);

      let regex;
      if (pattern.endsWith('*')) {
        const dir = pattern.slice(0, -1);
        regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
      } else if (pattern.includes('*')) {
        // First escape all regex special chars except *, then replace * with .*
        const regexPattern = pattern
          .replace(/[.+?^${}()|[\]\\]/g, '\\$&')
          .replace(/\*/g, '.*');
        regex = new RegExp(`^${regexPattern}$`);
      } else {
        regex = new RegExp(`^${pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`);
      }

      return { regex, owners };
    });

    for (const file of changedFiles) {
      for (const { regex, owners } of codeownersRegexes) {
        if (regex.test(file) && owners.some(owner => owner === `@${prAuthor}`)) {
          labels.add('by-code-owner');
          return labels;
        }
      }
    }
  } catch (error) {
    console.log('Failed to read or parse CODEOWNERS file:', error.message);
  }

  return labels;
}
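// Illustrative sketch (not part of the workflow script above): how the
// CODEOWNERS pattern handling in detectCodeOwner() behaves for a typical
// trailing-star entry. The pattern and file path below are made-up examples.
function exampleCodeownersMatch() {
  // An entry like "esphome/components/foo/*" ends with "*", so after slicing
  // off the star it becomes a prefix match on the remaining directory path.
  const dir = 'esphome/components/foo/';
  const regex = new RegExp(`^${dir.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`);
  // Returns true: the changed file sits under the owned directory.
  return regex.test('esphome/components/foo/sensor.py');
}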
// Strategy: Test detection
async function detectTests() {
  const labels = new Set();
  const testFiles = changedFiles.filter(file => file.startsWith('tests/'));

  if (testFiles.length > 0) {
    labels.add('has-tests');
  }

  return labels;
}

// Strategy: PR Template Checkbox detection
async function detectPRTemplateCheckboxes() {
  const labels = new Set();
  const prBody = context.payload.pull_request.body || '';

  console.log('Checking PR template checkboxes...');

  // Check for checked checkboxes in the "Types of changes" section
  const checkboxPatterns = [
    { pattern: /- \[x\] Bugfix \(non-breaking change which fixes an issue\)/i, label: 'bugfix' },
    { pattern: /- \[x\] New feature \(non-breaking change which adds functionality\)/i, label: 'new-feature' },
    { pattern: /- \[x\] Breaking change \(fix or feature that would cause existing functionality to not work as expected\)/i, label: 'breaking-change' },
    { pattern: /- \[x\] Developer breaking change \(an API change that could break external components\)/i, label: 'developer-breaking-change' },
    { pattern: /- \[x\] Code quality improvements to existing code or addition of tests/i, label: 'code-quality' }
  ];

  for (const { pattern, label } of checkboxPatterns) {
    if (pattern.test(prBody)) {
      console.log(`Found checked checkbox for: ${label}`);
      labels.add(label);
    }
  }

  return labels;
}

// Strategy: Requirements detection
async function detectRequirements(allLabels) {
  const labels = new Set();

  // Check for missing tests
  if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
    labels.add('needs-tests');
  }

  // Check for missing docs
  if (allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) {
    const prBody = context.payload.pull_request.body || '';
    const hasDocsLink = DOCS_PR_PATTERNS.some(pattern => pattern.test(prBody));

    if (!hasDocsLink) {
      labels.add('needs-docs');
    }
  }

  // Check for missing CODEOWNERS
  if (allLabels.has('new-component')) {
    const codeownersModified = prFiles.some(file =>
      file.filename === 'CODEOWNERS' &&
      (file.status === 'modified' || file.status === 'added') &&
      (file.additions || 0) > 0
    );

    if (!codeownersModified) {
      labels.add('needs-codeowners');
    }
  }

  return labels;
}
// Generate review messages
function generateReviewMessages(finalLabels, originalLabelCount) {
  const messages = [];
  const prAuthor = context.payload.pull_request.user.login;

  // Too big message
  if (finalLabels.includes('too-big')) {
    const testAdditions = prFiles
      .filter(file => file.filename.startsWith('tests/'))
      .reduce((sum, file) => sum + (file.additions || 0), 0);
    const testDeletions = prFiles
      .filter(file => file.filename.startsWith('tests/'))
      .reduce((sum, file) => sum + (file.deletions || 0), 0);
    const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);

    const tooManyLabels = originalLabelCount > MAX_LABELS;
    const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;

    let message = `${TOO_BIG_MARKER}\n### 📦 Pull Request Size\n\n`;

    if (tooManyLabels && tooManyChanges) {
      message += `This PR is too large with ${nonTestChanges} line changes (excluding tests) and affects ${originalLabelCount} different components/areas.`;
    } else if (tooManyLabels) {
      message += `This PR affects ${originalLabelCount} different components/areas.`;
    } else {
      message += `This PR is too large with ${nonTestChanges} line changes (excluding tests).`;
    }

    message += ` Please consider breaking it down into smaller, focused PRs to make review easier and reduce the risk of conflicts.\n\n`;
    message += `For guidance on breaking down large PRs, see: https://developers.esphome.io/contributing/submitting-your-work/#how-to-approach-large-submissions`;

    messages.push(message);
  }

  // CODEOWNERS message
  if (finalLabels.includes('needs-codeowners')) {
    const message = `${CODEOWNERS_MARKER}\n### 👥 Code Ownership\n\n` +
      `Hey there @${prAuthor},\n` +
      `Thanks for submitting this pull request! Can you add yourself as a codeowner for this integration? ` +
      `This way we can notify you if a bug report for this integration is reported.\n\n` +
      `In \`__init__.py\` of the integration, please add:\n\n` +
      `\`\`\`python\nCODEOWNERS = ["@${prAuthor}"]\n\`\`\`\n\n` +
      `And run \`script/build_codeowners.py\``;

    messages.push(message);
  }

  return messages;
}

// Handle reviews
async function handleReviews(finalLabels, originalLabelCount) {
  const reviewMessages = generateReviewMessages(finalLabels, originalLabelCount);
  const hasReviewableLabels = finalLabels.some(label =>
    ['too-big', 'needs-codeowners'].includes(label)
  );

  const { data: reviews } = await github.rest.pulls.listReviews({
    owner,
    repo,
    pull_number: pr_number
  });

  const botReviews = reviews.filter(review =>
    review.user.type === 'Bot' &&
    review.state === 'CHANGES_REQUESTED' &&
    review.body && review.body.includes(BOT_COMMENT_MARKER)
  );

  if (hasReviewableLabels) {
    const reviewBody = `${BOT_COMMENT_MARKER}\n\n${reviewMessages.join('\n\n---\n\n')}`;

    if (botReviews.length > 0) {
      // Update existing review
      await github.rest.pulls.updateReview({
        owner,
        repo,
        pull_number: pr_number,
        review_id: botReviews[0].id,
        body: reviewBody
      });
      console.log('Updated existing bot review');
    } else {
      // Create new review
      await github.rest.pulls.createReview({
        owner,
        repo,
        pull_number: pr_number,
        body: reviewBody,
        event: 'REQUEST_CHANGES'
      });
      console.log('Created new bot review');
    }
  } else if (botReviews.length > 0) {
    // Dismiss existing reviews
    for (const review of botReviews) {
      try {
        await github.rest.pulls.dismissReview({
          owner,
          repo,
          pull_number: pr_number,
          review_id: review.id,
          message: 'Review dismissed: All requirements have been met'
        });
        console.log(`Dismissed bot review ${review.id}`);
      } catch (error) {
        console.log(`Failed to dismiss review ${review.id}:`, error.message);
      }
    }
  }
}
// Main execution
const apiData = await fetchApiData();
const baseRef = context.payload.pull_request.base.ref;

// Early exit for release and beta branches only
if (baseRef === 'release' || baseRef === 'beta') {
  const branchLabels = await detectMergeBranch();
  const finalLabels = Array.from(branchLabels);

  console.log('Computed labels (merge branch only):', finalLabels.join(', '));

  // Apply labels
  if (finalLabels.length > 0) {
    await github.rest.issues.addLabels({
      owner,
      repo,
      issue_number: pr_number,
      labels: finalLabels
    });
  }

  // Remove old managed labels
  const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
  for (const label of labelsToRemove) {
    try {
      await github.rest.issues.removeLabel({
        owner,
        repo,
        issue_number: pr_number,
        name: label
      });
    } catch (error) {
      console.log(`Failed to remove label ${label}:`, error.message);
    }
  }

  return;
}

// Run all strategies
const [
  branchLabels,
  componentLabels,
  newComponentLabels,
  newPlatformLabels,
  coreLabels,
  sizeLabels,
  dashboardLabels,
  actionsLabels,
  codeOwnerLabels,
  testLabels,
  checkboxLabels
] = await Promise.all([
  detectMergeBranch(),
  detectComponentPlatforms(apiData),
  detectNewComponents(),
  detectNewPlatforms(apiData),
  detectCoreChanges(),
  detectPRSize(),
  detectDashboardChanges(),
  detectGitHubActionsChanges(),
  detectCodeOwner(),
  detectTests(),
  detectPRTemplateCheckboxes()
]);

// Combine all labels
const allLabels = new Set([
  ...branchLabels,
  ...componentLabels,
  ...newComponentLabels,
  ...newPlatformLabels,
  ...coreLabels,
  ...sizeLabels,
  ...dashboardLabels,
  ...actionsLabels,
  ...codeOwnerLabels,
  ...testLabels,
  ...checkboxLabels
]);

// Detect requirements based on all other labels
const requirementLabels = await detectRequirements(allLabels);
for (const label of requirementLabels) {
  allLabels.add(label);
}

let finalLabels = Array.from(allLabels);

// For mega-PRs, exclude component labels if there are too many
if (isMegaPR) {
  const componentLabels = finalLabels.filter(label => label.startsWith('component: '));
  if (componentLabels.length > COMPONENT_LABEL_THRESHOLD) {
    finalLabels = finalLabels.filter(label => !label.startsWith('component: '));
    console.log(`Mega-PR detected - excluding ${componentLabels.length} component labels (threshold: ${COMPONENT_LABEL_THRESHOLD})`);
  }
}

// Handle too many labels (only for non-mega PRs)
const tooManyLabels = finalLabels.length > MAX_LABELS;
const originalLabelCount = finalLabels.length;

if (tooManyLabels && !isMegaPR && !finalLabels.includes('too-big')) {
  finalLabels = ['too-big'];
}

console.log('Computed labels:', finalLabels.join(', '));

// Handle reviews
await handleReviews(finalLabels, originalLabelCount);

// Apply labels
if (finalLabels.length > 0) {
  console.log(`Adding labels: ${finalLabels.join(', ')}`);
  await github.rest.issues.addLabels({
    owner,
    repo,
    issue_number: pr_number,
    labels: finalLabels
  });
}

// Remove old managed labels
const labelsToRemove = managedLabels.filter(label => !finalLabels.includes(label));
for (const label of labelsToRemove) {
  console.log(`Removing label: ${label}`);
  try {
    await github.rest.issues.removeLabel({
      owner,
      repo,
      issue_number: pr_number,
      name: label
    });
  } catch (error) {
    console.log(`Failed to remove label ${label}:`, error.message);
  }
}
.github/workflows/ci-api-proto.yml (4 changes)
@@ -21,9 +21,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
       - name: Set up Python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
         with:
           python-version: "3.11"
.github/workflows/ci-clang-tidy-hash.yml (4 changes)
@@ -21,10 +21,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

       - name: Set up Python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
         with:
           python-version: "3.11"
.github/workflows/ci-docker.yml (4 changes)
@@ -43,9 +43,9 @@ jobs:
           - "docker"
           # - "lint"
     steps:
-      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
       - name: Set up Python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
         with:
           python-version: "3.11"
       - name: Set up Docker Buildx
@@ -49,7 +49,7 @@ jobs:

       - name: Check out code from base repository
         if: steps.pr.outputs.skip != 'true'
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
         with:
           # Always check out from the base repository (esphome/esphome), never from forks
           # Use the PR's target branch to ensure we run trusted code from the main repo
.github/workflows/ci.yml (64 changes)
@@ -36,18 +36,18 @@ jobs:
       cache-key: ${{ steps.cache-key.outputs.key }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - name: Generate cache-key
        id: cache-key
        run: echo key="${{ hashFiles('requirements.txt', 'requirements_test.txt', '.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: venv
           # yamllint disable-line rule:line-length
@@ -70,7 +70,7 @@ jobs:
     if: needs.determine-jobs.outputs.python-linters == 'true'
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
       - name: Restore Python
         uses: ./.github/actions/restore-python
         with:
@@ -91,7 +91,7 @@ jobs:
       - common
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
       - name: Restore Python
         uses: ./.github/actions/restore-python
         with:
@@ -132,7 +132,7 @@ jobs:
       - common
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
       - name: Restore Python
         id: restore-python
         uses: ./.github/actions/restore-python
@@ -157,7 +157,7 @@ jobs:
           token: ${{ secrets.CODECOV_TOKEN }}
       - name: Save Python virtual environment cache
         if: github.ref == 'refs/heads/dev'
-        uses: actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: venv
           key: ${{ runner.os }}-${{ steps.restore-python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
@@ -183,7 +183,7 @@ jobs:
       component-test-batches: ${{ steps.determine.outputs.component-test-batches }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
         with:
           # Fetch enough history to find the merge base
           fetch-depth: 2
@@ -193,7 +193,7 @@ jobs:
           python-version: ${{ env.DEFAULT_PYTHON }}
           cache-key: ${{ needs.common.outputs.cache-key }}
       - name: Restore components graph cache
-        uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: .temp/components_graph.json
           key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
@@ -223,7 +223,7 @@ jobs:
          echo "component-test-batches=$(echo "$output" | jq -c '.component_test_batches')" >> $GITHUB_OUTPUT
       - name: Save components graph cache
         if: github.ref == 'refs/heads/dev'
-        uses: actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: .temp/components_graph.json
           key: components-graph-${{ hashFiles('esphome/components/**/*.py') }}
@@ -237,15 +237,15 @@ jobs:
     if: needs.determine-jobs.outputs.integration-tests == 'true'
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
       - name: Set up Python 3.13
         id: python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
         with:
           python-version: "3.13"
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: venv
           key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
@@ -273,7 +273,7 @@ jobs:
     if: github.event_name == 'pull_request' && (needs.determine-jobs.outputs.cpp-unit-tests-run-all == 'true' || needs.determine-jobs.outputs.cpp-unit-tests-components != '[]')
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

       - name: Restore Python
         uses: ./.github/actions/restore-python
@@ -321,7 +321,7 @@ jobs:

     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
         with:
           # Need history for HEAD~1 to work for checking changed files
           fetch-depth: 2
@@ -334,14 +334,14 @@ jobs:

       - name: Cache platformio
         if: github.ref == 'refs/heads/dev'
-        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}

       - name: Cache platformio
         if: github.ref != 'refs/heads/dev'
-        uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
@@ -400,7 +400,7 @@ jobs:
       GH_TOKEN: ${{ github.token }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
         with:
           # Need history for HEAD~1 to work for checking changed files
           fetch-depth: 2
@@ -413,14 +413,14 @@ jobs:

       - name: Cache platformio
         if: github.ref == 'refs/heads/dev'
-        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}

       - name: Cache platformio
         if: github.ref != 'refs/heads/dev'
-        uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
@@ -489,7 +489,7 @@ jobs:

     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
         with:
           # Need history for HEAD~1 to work for checking changed files
           fetch-depth: 2
@@ -502,14 +502,14 @@ jobs:

       - name: Cache platformio
         if: github.ref == 'refs/heads/dev'
-        uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}

       - name: Cache platformio
         if: github.ref != 'refs/heads/dev'
-        uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-tidyesp32-${{ hashFiles('platformio.ini') }}
@@ -577,7 +577,7 @@ jobs:
           version: 1.0

       - name: Check out code from GitHub
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
       - name: Restore Python
         uses: ./.github/actions/restore-python
         with:
@@ -662,7 +662,7 @@ jobs:
     if: github.event_name == 'pull_request' && !startsWith(github.base_ref, 'beta') && !startsWith(github.base_ref, 'release')
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
       - name: Restore Python
         uses: ./.github/actions/restore-python
         with:
@@ -688,7 +688,7 @@ jobs:
       skip: ${{ steps.check-script.outputs.skip }}
     steps:
       - name: Check out target branch
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
         with:
           ref: ${{ github.base_ref }}

@@ -735,7 +735,7 @@ jobs:
       - name: Restore cached memory analysis
         id: cache-memory-analysis
         if: steps.check-script.outputs.skip != 'true'
-        uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: memory-analysis-target.json
           key: ${{ steps.cache-key.outputs.cache-key }}
@@ -759,7 +759,7 @@ jobs:

       - name: Cache platformio
         if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
-        uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
@@ -800,7 +800,7 @@ jobs:

       - name: Save memory analysis to cache
         if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success'
-        uses: actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: memory-analysis-target.json
           key: ${{ steps.cache-key.outputs.cache-key }}
@@ -840,14 +840,14 @@ jobs:
       flash_usage: ${{ steps.extract.outputs.flash_usage }}
     steps:
       - name: Check out PR branch
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
       - name: Restore Python
         uses: ./.github/actions/restore-python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           cache-key: ${{ needs.common.outputs.cache-key }}
       - name: Cache platformio
-        uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
+        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
         with:
           path: ~/.platformio
           key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
@@ -908,7 +908,7 @@ jobs:
       GH_TOKEN: ${{ github.token }}
     steps:
       - name: Check out code
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
       - name: Restore Python
         uses: ./.github/actions/restore-python
         with:
.github/workflows/codeql.yml (6 changes)
@@ -54,11 +54,11 @@ jobs:
       # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
     steps:
       - name: Checkout repository
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
+        uses: github/codeql-action/init@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
         with:
           languages: ${{ matrix.language }}
           build-mode: ${{ matrix.build-mode }}
@@ -86,6 +86,6 @@ jobs:
            exit 1

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
+        uses: github/codeql-action/analyze@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
         with:
           category: "/language:${{matrix.language}}"
.github/workflows/release.yml (20 changes)
@@ -20,7 +20,7 @@ jobs:
       branch_build: ${{ steps.tag.outputs.branch_build }}
       deploy_env: ${{ steps.tag.outputs.deploy_env }}
     steps:
-      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
       - name: Get tag
         id: tag
         # yamllint disable rule:line-length
@@ -60,9 +60,9 @@ jobs:
       contents: read
       id-token: write
     steps:
-      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
       - name: Set up Python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
         with:
           python-version: "3.x"
       - name: Build
@@ -92,9 +92,9 @@ jobs:
             os: "ubuntu-24.04-arm"

     steps:
-      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
       - name: Set up Python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
         with:
           python-version: "3.11"

@@ -102,12 +102,12 @@ jobs:
         uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0

       - name: Log in to docker hub
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
         with:
           username: ${{ secrets.DOCKER_USER }}
           password: ${{ secrets.DOCKER_PASSWORD }}
       - name: Log in to the GitHub container registry
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
@@ -168,7 +168,7 @@ jobs:
           - ghcr
           - dockerhub
     steps:
-      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

       - name: Download digests
         uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
@@ -182,13 +182,13 @@ jobs:

       - name: Log in to docker hub
         if: matrix.registry == 'dockerhub'
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
         with:
           username: ${{ secrets.DOCKER_USER }}
           password: ${{ secrets.DOCKER_PASSWORD }}
       - name: Log in to the GitHub container registry
         if: matrix.registry == 'ghcr'
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
.github/workflows/sync-device-classes.yml (8 changes)
@@ -13,16 +13,16 @@ jobs:
     if: github.repository == 'esphome/esphome'
     steps:
       - name: Checkout
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

       - name: Checkout Home Assistant
-        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
         with:
           repository: home-assistant/core
           path: lib/home-assistant

       - name: Setup Python
-        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
         with:
           python-version: 3.13

@@ -41,7 +41,7 @@ jobs:
           python script/run-in-env.py pre-commit run --all-files

       - name: Commit changes
-        uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
+        uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
         with:
           commit-message: "Synchronise Device Classes from Home Assistant"
           committer: esphomebot <esphome@openhomefoundation.org>
@@ -11,7 +11,7 @@ ci:
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: v0.14.11
+    rev: v0.15.0
     hooks:
       # Run the linter.
       - id: ruff
|
|||||||
esphome/components/bmp3xx_base/* @latonita @martgras
|
esphome/components/bmp3xx_base/* @latonita @martgras
|
||||||
esphome/components/bmp3xx_i2c/* @latonita
|
esphome/components/bmp3xx_i2c/* @latonita
|
||||||
esphome/components/bmp3xx_spi/* @latonita
|
esphome/components/bmp3xx_spi/* @latonita
|
||||||
esphome/components/bmp581/* @kahrendt
|
esphome/components/bmp581_base/* @danielkent-net @kahrendt
|
||||||
|
esphome/components/bmp581_i2c/* @danielkent-net @kahrendt
|
||||||
esphome/components/bp1658cj/* @Cossid
|
esphome/components/bp1658cj/* @Cossid
|
||||||
esphome/components/bp5758d/* @Cossid
|
esphome/components/bp5758d/* @Cossid
|
||||||
esphome/components/bthome_mithermometer/* @nagyrobi
|
esphome/components/bthome_mithermometer/* @nagyrobi
|
||||||
@@ -103,6 +104,7 @@ esphome/components/cc1101/* @gabest11 @lygris
|
|||||||
esphome/components/ccs811/* @habbie
|
esphome/components/ccs811/* @habbie
|
||||||
esphome/components/cd74hc4067/* @asoehlke
|
esphome/components/cd74hc4067/* @asoehlke
|
||||||
esphome/components/ch422g/* @clydebarrow @jesterret
|
esphome/components/ch422g/* @clydebarrow @jesterret
|
||||||
|
esphome/components/ch423/* @dwmw2
|
||||||
esphome/components/chsc6x/* @kkosik20
|
esphome/components/chsc6x/* @kkosik20
|
||||||
esphome/components/climate/* @esphome/core
|
esphome/components/climate/* @esphome/core
|
||||||
esphome/components/climate_ir/* @glmnet
|
esphome/components/climate_ir/* @glmnet
|
||||||
@@ -132,6 +134,7 @@ esphome/components/dfplayer/* @glmnet
|
|||||||
esphome/components/dfrobot_sen0395/* @niklasweber
|
esphome/components/dfrobot_sen0395/* @niklasweber
|
||||||
esphome/components/dht/* @OttoWinter
|
esphome/components/dht/* @OttoWinter
|
||||||
esphome/components/display_menu_base/* @numo68
|
esphome/components/display_menu_base/* @numo68
|
||||||
|
esphome/components/dlms_meter/* @SimonFischer04
|
||||||
esphome/components/dps310/* @kbx81
|
esphome/components/dps310/* @kbx81
|
||||||
esphome/components/ds1307/* @badbadc0ffee
|
esphome/components/ds1307/* @badbadc0ffee
|
||||||
esphome/components/ds2484/* @mrk-its
|
esphome/components/ds2484/* @mrk-its
|
||||||
@@ -481,6 +484,7 @@ esphome/components/switch/* @esphome/core
|
|||||||
esphome/components/switch/binary_sensor/* @ssieb
|
esphome/components/switch/binary_sensor/* @ssieb
|
||||||
esphome/components/sx126x/* @swoboda1337
|
esphome/components/sx126x/* @swoboda1337
|
||||||
esphome/components/sx127x/* @swoboda1337
|
esphome/components/sx127x/* @swoboda1337
|
||||||
|
esphome/components/sy6970/* @linkedupbits
|
||||||
esphome/components/syslog/* @clydebarrow
|
esphome/components/syslog/* @clydebarrow
|
||||||
esphome/components/t6615/* @tylermenezes
|
esphome/components/t6615/* @tylermenezes
|
||||||
esphome/components/tc74/* @sethgirvan
|
esphome/components/tc74/* @sethgirvan
|
||||||
@@ -528,7 +532,7 @@ esphome/components/uart/packet_transport/* @clydebarrow
|
|||||||
esphome/components/udp/* @clydebarrow
|
esphome/components/udp/* @clydebarrow
|
||||||
esphome/components/ufire_ec/* @pvizeli
|
esphome/components/ufire_ec/* @pvizeli
|
||||||
esphome/components/ufire_ise/* @pvizeli
|
esphome/components/ufire_ise/* @pvizeli
|
||||||
esphome/components/ultrasonic/* @OttoWinter
|
esphome/components/ultrasonic/* @ssieb @swoboda1337
|
||||||
esphome/components/update/* @jesserockz
|
esphome/components/update/* @jesserockz
|
||||||
esphome/components/uponor_smatrix/* @kroimon
|
esphome/components/uponor_smatrix/* @kroimon
|
||||||
esphome/components/usb_cdc_acm/* @kbx81
|
esphome/components/usb_cdc_acm/* @kbx81
|
||||||
|
|||||||
Doxyfile (2 changes)
@@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome
 # could be handy for archiving the generated documentation or if some version
 # control system is used.

-PROJECT_NUMBER = 2026.1.0b3
+PROJECT_NUMBER = 2026.2.0-dev

 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer a
@@ -1,5 +1,6 @@
 # PYTHON_ARGCOMPLETE_OK
 import argparse
+from collections.abc import Callable
 from datetime import datetime
 import functools
 import getpass
@@ -42,6 +43,7 @@ from esphome.const import (
     CONF_SUBSTITUTIONS,
     CONF_TOPIC,
     ENV_NOGITIGNORE,
+    KEY_NATIVE_IDF,
     PLATFORM_ESP32,
     PLATFORM_ESP8266,
     PLATFORM_RP2040,
@@ -115,6 +117,7 @@ class ArgsProtocol(Protocol):
     configuration: str
     name: str
     upload_speed: str | None
+    native_idf: bool


 def choose_prompt(options, purpose: str = None):
@@ -222,8 +225,13 @@ def choose_upload_log_host(
         else:
             resolved.append(device)
     if not resolved:
+        if CORE.dashboard:
+            hint = "If you know the IP, set 'use_address' in your network config."
+        else:
+            hint = "If you know the IP, try --device <IP>"
         raise EsphomeError(
-            f"All specified devices {defaults} could not be resolved. Is the device connected to the network?"
+            f"All specified devices {defaults} could not be resolved. "
+            f"Is the device connected to the network? {hint}"
         )
     return resolved

@@ -286,8 +294,13 @@ def has_api() -> bool:


 def has_ota() -> bool:
-    """Check if OTA is available."""
-    return CONF_OTA in CORE.config
+    """Check if OTA upload is available (requires platform: esphome)."""
+    if CONF_OTA not in CORE.config:
+        return False
+    return any(
+        ota_item.get(CONF_PLATFORM) == CONF_ESPHOME
+        for ota_item in CORE.config[CONF_OTA]
+    )


 def has_mqtt_ip_lookup() -> bool:
@@ -494,12 +507,15 @@ def wrap_to_code(name, comp):
     return wrapped


-def write_cpp(config: ConfigType) -> int:
+def write_cpp(config: ConfigType, native_idf: bool = False) -> int:
     if not get_bool_env(ENV_NOGITIGNORE):
         writer.write_gitignore()

+    # Store native_idf flag so esp32 component can check it
+    CORE.data[KEY_NATIVE_IDF] = native_idf
+
     generate_cpp_contents(config)
-    return write_cpp_file()
+    return write_cpp_file(native_idf=native_idf)


 def generate_cpp_contents(config: ConfigType) -> None:
@@ -513,32 +529,54 @@ def generate_cpp_contents(config: ConfigType) -> None:
     CORE.flush_tasks()


-def write_cpp_file() -> int:
+def write_cpp_file(native_idf: bool = False) -> int:
     code_s = indent(CORE.cpp_main_section)
     writer.write_cpp(code_s)

-    from esphome.build_gen import platformio
+    if native_idf and CORE.is_esp32 and CORE.target_framework == "esp-idf":
+        from esphome.build_gen import espidf

-    platformio.write_project()
+        espidf.write_project()
+    else:
+        from esphome.build_gen import platformio
+
+        platformio.write_project()
+
     return 0


 def compile_program(args: ArgsProtocol, config: ConfigType) -> int:
-    from esphome import platformio_api
+    native_idf = getattr(args, "native_idf", False)

     # NOTE: "Build path:" format is parsed by script/ci_memory_impact_extract.py
     # If you change this format, update the regex in that script as well
     _LOGGER.info("Compiling app... Build path: %s", CORE.build_path)
-    rc = platformio_api.run_compile(config, CORE.verbose)
-    if rc != 0:
-        return rc
+    if native_idf and CORE.is_esp32 and CORE.target_framework == "esp-idf":
+        from esphome import espidf_api
+
+        rc = espidf_api.run_compile(config, CORE.verbose)
+        if rc != 0:
+            return rc
+
+        # Create factory.bin and ota.bin
+        espidf_api.create_factory_bin()
+        espidf_api.create_ota_bin()
+    else:
+        from esphome import platformio_api
+
+        rc = platformio_api.run_compile(config, CORE.verbose)
+        if rc != 0:
+            return rc
+
+        idedata = platformio_api.get_idedata(config)
+        if idedata is None:
+            return 1

     # Check if firmware was rebuilt and emit build_info + create manifest
     _check_and_emit_build_info()

-    idedata = platformio_api.get_idedata(config)
-    return 0 if idedata is not None else 1
+    return 0


 def _check_and_emit_build_info() -> None:
@@ -795,7 +833,8 @@ def command_vscode(args: ArgsProtocol) -> int | None:
|
|||||||
|
|
||||||
|
|
||||||
def command_compile(args: ArgsProtocol, config: ConfigType) -> int | None:
|
def command_compile(args: ArgsProtocol, config: ConfigType) -> int | None:
|
||||||
exit_code = write_cpp(config)
|
native_idf = getattr(args, "native_idf", False)
|
||||||
|
exit_code = write_cpp(config, native_idf=native_idf)
|
||||||
if exit_code != 0:
|
if exit_code != 0:
|
||||||
return exit_code
|
return exit_code
|
||||||
if args.only_generate:
|
if args.only_generate:
|
||||||
@@ -850,7 +889,8 @@ def command_logs(args: ArgsProtocol, config: ConfigType) -> int | None:
|
|||||||
|
|
||||||
|
|
||||||
def command_run(args: ArgsProtocol, config: ConfigType) -> int | None:
|
def command_run(args: ArgsProtocol, config: ConfigType) -> int | None:
|
||||||
exit_code = write_cpp(config)
|
native_idf = getattr(args, "native_idf", False)
|
||||||
|
exit_code = write_cpp(config, native_idf=native_idf)
|
||||||
if exit_code != 0:
|
if exit_code != 0:
|
||||||
return exit_code
|
return exit_code
|
||||||
exit_code = compile_program(args, config)
|
exit_code = compile_program(args, config)
|
||||||
@@ -931,11 +971,21 @@ def command_dashboard(args: ArgsProtocol) -> int | None:
     return dashboard.start_dashboard(args)


-def command_update_all(args: ArgsProtocol) -> int | None:
+def run_multiple_configs(
+    files: list, command_builder: Callable[[str], list[str]]
+) -> int:
+    """Run a command for each configuration file in a subprocess.
+
+    Args:
+        files: List of configuration files to process.
+        command_builder: Callable that takes a file path and returns a command list.
+
+    Returns:
+        Number of failed files.
+    """
     import click

     success = {}
-    files = list_yaml_files(args.configuration)
     twidth = 60

     def print_bar(middle_text):
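A minimal usage sketch of the new helper (the file names and the chosen subcommand below are made up; only the call shape follows the docstring above):

files = ["living_room.yaml", "kitchen.yaml"]  # hypothetical configuration files
failed = run_multiple_configs(files, lambda f: ["esphome", "run", str(f), "--no-logs"])
print(f"{failed} configuration(s) failed")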
@@ -945,17 +995,19 @@ def command_update_all(args: ArgsProtocol) -> int | None:
         safe_print(f"{half_line}{middle_text}{half_line}")

     for f in files:
-        safe_print(f"Updating {color(AnsiFore.CYAN, str(f))}")
+        f_path = Path(f) if not isinstance(f, Path) else f
+
+        if any(f_path.name == x for x in SECRETS_FILES):
+            _LOGGER.warning("Skipping secrets file %s", f_path)
+            continue
+
+        safe_print(f"Processing {color(AnsiFore.CYAN, str(f))}")
         safe_print("-" * twidth)
         safe_print()
-        if CORE.dashboard:
-            rc = run_external_process(
-                "esphome", "--dashboard", "run", f, "--no-logs", "--device", "OTA"
-            )
-        else:
-            rc = run_external_process(
-                "esphome", "run", f, "--no-logs", "--device", "OTA"
-            )
+        cmd = command_builder(f)
+        rc = run_external_process(*cmd)
         if rc == 0:
             print_bar(f"[{color(AnsiFore.BOLD_GREEN, 'SUCCESS')}] {str(f)}")
             success[f] = True
@@ -970,6 +1022,8 @@ def command_update_all(args: ArgsProtocol) -> int | None:
     print_bar(f"[{color(AnsiFore.BOLD_WHITE, 'SUMMARY')}]")
     failed = 0
     for f in files:
+        if f not in success:
+            continue  # Skipped file
         if success[f]:
             safe_print(f"  - {str(f)}: {color(AnsiFore.GREEN, 'SUCCESS')}")
         else:
@@ -978,6 +1032,17 @@ def command_update_all(args: ArgsProtocol) -> int | None:
     return failed


+def command_update_all(args: ArgsProtocol) -> int | None:
+    files = list_yaml_files(args.configuration)
+
+    def build_command(f):
+        if CORE.dashboard:
+            return ["esphome", "--dashboard", "run", f, "--no-logs", "--device", "OTA"]
+        return ["esphome", "run", f, "--no-logs", "--device", "OTA"]
+
+    return run_multiple_configs(files, build_command)
+
+
 def command_idedata(args: ArgsProtocol, config: ConfigType) -> int:
     import json

@@ -1279,6 +1344,11 @@ def parse_args(argv):
         help="Only generate source code, do not compile.",
         action="store_true",
     )
+    parser_compile.add_argument(
+        "--native-idf",
+        help="Build with native ESP-IDF instead of PlatformIO (ESP32 esp-idf framework only).",
+        action="store_true",
+    )

     parser_upload = subparsers.add_parser(
         "upload",
@@ -1360,6 +1430,11 @@ def parse_args(argv):
         help="Reset the device before starting serial logs.",
         default=os.getenv("ESPHOME_SERIAL_LOGGING_RESET"),
     )
+    parser_run.add_argument(
+        "--native-idf",
+        help="Build with native ESP-IDF instead of PlatformIO (ESP32 esp-idf framework only).",
+        action="store_true",
+    )

     parser_clean = subparsers.add_parser(
         "clean-mqtt",
@@ -1528,38 +1603,48 @@ def run_esphome(argv):

     _LOGGER.info("ESPHome %s", const.__version__)

-    for conf_path in args.configuration:
-        conf_path = Path(conf_path)
-        if any(conf_path.name == x for x in SECRETS_FILES):
-            _LOGGER.warning("Skipping secrets file %s", conf_path)
-            continue
-
-        CORE.config_path = conf_path
-        CORE.dashboard = args.dashboard
-
-        # For logs command, skip updating external components
-        skip_external = args.command == "logs"
-        config = read_config(
-            dict(args.substitution) if args.substitution else {},
-            skip_external_update=skip_external,
-        )
-        if config is None:
-            return 2
-        CORE.config = config
-
-        if args.command not in POST_CONFIG_ACTIONS:
-            safe_print(f"Unknown command {args.command}")
-
-        try:
-            rc = POST_CONFIG_ACTIONS[args.command](args, config)
-        except EsphomeError as e:
-            _LOGGER.error(e, exc_info=args.verbose)
-            return 1
-        if rc != 0:
-            return rc
-
-        CORE.reset()
-    return 0
+    # Multiple configurations: use subprocesses to avoid state leakage
+    # between compilations (e.g., LVGL touchscreen state in module globals)
+    if len(args.configuration) > 1:
+        # Build command by reusing argv, replacing all configs with single file
+        # argv[0] is the program path, skip it since we prefix with "esphome"
+        def build_command(f):
+            return (
+                ["esphome"]
+                + [arg for arg in argv[1:] if arg not in args.configuration]
+                + [str(f)]
+            )
+
+        return run_multiple_configs(args.configuration, build_command)
+
+    # Single configuration
+    conf_path = Path(args.configuration[0])
+    if any(conf_path.name == x for x in SECRETS_FILES):
+        _LOGGER.warning("Skipping secrets file %s", conf_path)
+        return 0
+
+    CORE.config_path = conf_path
+    CORE.dashboard = args.dashboard
+
+    # For logs command, skip updating external components
+    skip_external = args.command == "logs"
+    config = read_config(
+        dict(args.substitution) if args.substitution else {},
+        skip_external_update=skip_external,
+    )
+    if config is None:
+        return 2
+    CORE.config = config
+
+    if args.command not in POST_CONFIG_ACTIONS:
+        safe_print(f"Unknown command {args.command}")
+        return 1
+
+    try:
+        return POST_CONFIG_ACTIONS[args.command](args, config)
+    except EsphomeError as e:
+        _LOGGER.error(e, exc_info=args.verbose)
+        return 1


 def main():
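For clarity, the per-file command rebuild above can be exercised on its own; the argv values here are invented for illustration:

argv = ["esphome", "--verbose", "run", "a.yaml", "b.yaml", "--no-logs"]
configuration = ["a.yaml", "b.yaml"]

def build_command(f):
    # Drop every configuration path from the original argv and append one file;
    # argv[0] (the program path) is replaced with the literal "esphome".
    return ["esphome"] + [arg for arg in argv[1:] if arg not in configuration] + [str(f)]

print(build_command("a.yaml"))  # ['esphome', '--verbose', 'run', '--no-logs', 'a.yaml']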
@@ -12,7 +12,6 @@ from .const import (
     CORE_SUBCATEGORY_PATTERNS,
     DEMANGLED_PATTERNS,
     ESPHOME_COMPONENT_PATTERN,
-    SECTION_TO_ATTR,
     SYMBOL_PATTERNS,
 )
 from .demangle import batch_demangle
@@ -22,7 +21,7 @@ from .helpers import (
     map_section_name,
     parse_symbol_line,
 )
-from .toolchain import find_tool, run_tool
+from .toolchain import find_tool, resolve_tool_path, run_tool

 if TYPE_CHECKING:
     from esphome.platformio_api import IDEData
@@ -91,6 +90,17 @@ class ComponentMemory:
     bss_size: int = 0  # Uninitialized data (ram only)
     symbol_count: int = 0

+    def add_section_size(self, section_name: str, size: int) -> None:
+        """Add size to the appropriate attribute for a section."""
+        if section_name == ".text":
+            self.text_size += size
+        elif section_name == ".rodata":
+            self.rodata_size += size
+        elif section_name == ".data":
+            self.data_size += size
+        elif section_name == ".bss":
+            self.bss_size += size
+
     @property
     def flash_total(self) -> int:
         """Total flash usage (text + rodata + data)."""
@@ -132,6 +142,12 @@ class MemoryAnalyzer:
             readelf_path = readelf_path or idedata.readelf_path
             _LOGGER.debug("Using toolchain paths from PlatformIO idedata")

+        # Validate paths exist, fall back to find_tool if they don't
+        # This handles cases like Zephyr where cc_path doesn't include full path
+        # and the toolchain prefix may differ (e.g., arm-zephyr-eabi- vs arm-none-eabi-)
+        objdump_path = resolve_tool_path("objdump", objdump_path, objdump_path)
+        readelf_path = resolve_tool_path("readelf", readelf_path, objdump_path)
+
         self.objdump_path = objdump_path or "objdump"
         self.readelf_path = readelf_path or "readelf"
         self.external_components = external_components or set()
@@ -161,12 +177,15 @@ class MemoryAnalyzer:
         self._elf_symbol_names: set[str] = set()
         # SDK symbols not in ELF (static/local symbols from closed-source libs)
         self._sdk_symbols: list[SDKSymbol] = []
+        # CSWTCH symbols: list of (name, size, source_file, component)
+        self._cswtch_symbols: list[tuple[str, int, str, str]] = []

     def analyze(self) -> dict[str, ComponentMemory]:
         """Analyze the ELF file and return component memory usage."""
         self._parse_sections()
         self._parse_symbols()
         self._categorize_symbols()
+        self._analyze_cswtch_symbols()
         self._analyze_sdk_libraries()
         return dict(self.components)

@@ -249,8 +268,7 @@ class MemoryAnalyzer:
             comp_mem.symbol_count += 1

             # Update the appropriate size attribute based on section
-            if attr_name := SECTION_TO_ATTR.get(section_name):
-                setattr(comp_mem, attr_name, getattr(comp_mem, attr_name) + size)
+            comp_mem.add_section_size(section_name, size)

             # Track uncategorized symbols
             if component == "other" and size > 0:
@@ -366,6 +384,205 @@ class MemoryAnalyzer:

         return "Other Core"

+    def _find_object_files_dir(self) -> Path | None:
+        """Find the directory containing object files for this build.
+
+        Returns:
+            Path to the directory containing .o files, or None if not found.
+        """
+        # The ELF is typically at .pioenvs/<env>/firmware.elf
+        # Object files are in .pioenvs/<env>/src/ and .pioenvs/<env>/lib*/
+        pioenvs_dir = self.elf_path.parent
+        if pioenvs_dir.exists() and any(pioenvs_dir.glob("src/*.o")):
+            return pioenvs_dir
+        return None
+
+    def _scan_cswtch_in_objects(
+        self, obj_dir: Path
+    ) -> dict[str, list[tuple[str, int]]]:
+        """Scan object files for CSWTCH symbols using a single nm invocation.
+
+        Uses ``nm --print-file-name -S`` on all ``.o`` files at once.
+        Output format: ``/path/to/file.o:address size type name``
+
+        Args:
+            obj_dir: Directory containing object files (.pioenvs/<env>/)
+
+        Returns:
+            Dict mapping "CSWTCH$NNN:size" to list of (source_file, size) tuples.
+        """
+        cswtch_map: dict[str, list[tuple[str, int]]] = defaultdict(list)
+
+        if not self.nm_path:
+            return cswtch_map
+
+        # Find all .o files recursively, sorted for deterministic output
+        obj_files = sorted(obj_dir.rglob("*.o"))
+        if not obj_files:
+            return cswtch_map
+
+        _LOGGER.debug("Scanning %d object files for CSWTCH symbols", len(obj_files))
+
+        # Single nm call with --print-file-name for all object files
+        result = run_tool(
+            [self.nm_path, "--print-file-name", "-S"] + [str(f) for f in obj_files],
+            timeout=30,
+        )
+        if result is None or result.returncode != 0:
+            return cswtch_map
+
+        for line in result.stdout.splitlines():
+            if "CSWTCH$" not in line:
+                continue
+
+            # Split on last ":" that precedes a hex address.
+            # nm --print-file-name format: filepath:hex_addr hex_size type name
+            # We split from the right: find the last colon followed by hex digits.
+            parts_after_colon = line.rsplit(":", 1)
+            if len(parts_after_colon) != 2:
+                continue
+
+            file_path = parts_after_colon[0]
+            fields = parts_after_colon[1].split()
+            # fields: [address, size, type, name]
+            if len(fields) < 4:
+                continue
+
+            sym_name = fields[3]
+            if not sym_name.startswith("CSWTCH$"):
+                continue
+
+            try:
+                size = int(fields[1], 16)
+            except ValueError:
+                continue
+
+            # Get relative path from obj_dir for readability
+            try:
+                rel_path = str(Path(file_path).relative_to(obj_dir))
+            except ValueError:
+                rel_path = file_path
+
+            key = f"{sym_name}:{size}"
+            cswtch_map[key].append((rel_path, size))
+
+        return cswtch_map
+
+    def _source_file_to_component(self, source_file: str) -> str:
+        """Map a source object file path to its component name.
+
+        Args:
+            source_file: Relative path like 'src/esphome/components/wifi/wifi_component.cpp.o'
+
+        Returns:
+            Component name like '[esphome]wifi' or the source file if unknown.
+        """
+        parts = Path(source_file).parts
+
+        # ESPHome component: src/esphome/components/<name>/...
+        if "components" in parts:
+            idx = parts.index("components")
+            if idx + 1 < len(parts):
+                component_name = parts[idx + 1]
+                if component_name in get_esphome_components():
+                    return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"
+                if component_name in self.external_components:
+                    return f"{_COMPONENT_PREFIX_EXTERNAL}{component_name}"
+
+        # ESPHome core: src/esphome/core/... or src/esphome/...
+        if "core" in parts and "esphome" in parts:
+            return _COMPONENT_CORE
+        if "esphome" in parts and "components" not in parts:
+            return _COMPONENT_CORE
+
+        # Framework/library files - return the first path component
+        # e.g., lib65b/ESPAsyncTCP/... -> lib65b
+        # FrameworkArduino/... -> FrameworkArduino
+        return parts[0] if parts else source_file
+
+    def _analyze_cswtch_symbols(self) -> None:
+        """Analyze CSWTCH (GCC switch table) symbols by tracing to source objects.
+
+        CSWTCH symbols are compiler-generated lookup tables for switch statements.
+        They are local symbols, so the same name can appear in different object files.
+        This method scans .o files to attribute them to their source components.
+        """
+        obj_dir = self._find_object_files_dir()
+        if obj_dir is None:
+            _LOGGER.debug("No object files directory found, skipping CSWTCH analysis")
+            return
+
+        # Scan object files for CSWTCH symbols
+        cswtch_map = self._scan_cswtch_in_objects(obj_dir)
+        if not cswtch_map:
+            _LOGGER.debug("No CSWTCH symbols found in object files")
+            return
+
+        # Collect CSWTCH symbols from the ELF (already parsed in sections)
+        # Include section_name for re-attribution of component totals
+        elf_cswtch = [
+            (symbol_name, size, section_name)
+            for section_name, section in self.sections.items()
+            for symbol_name, size, _ in section.symbols
+            if symbol_name.startswith("CSWTCH$")
+        ]
+
+        _LOGGER.debug(
+            "Found %d CSWTCH symbols in ELF, %d unique in object files",
+            len(elf_cswtch),
+            len(cswtch_map),
+        )
+
+        # Match ELF CSWTCH symbols to source files and re-attribute component totals.
+        # _categorize_symbols() already ran and put these into "other" since CSWTCH$
+        # names don't match any component pattern. We move the bytes to the correct
+        # component based on the object file mapping.
+        other_mem = self.components.get("other")
+
+        for sym_name, size, section_name in elf_cswtch:
+            key = f"{sym_name}:{size}"
+            sources = cswtch_map.get(key, [])
+
+            if len(sources) == 1:
+                source_file = sources[0][0]
+                component = self._source_file_to_component(source_file)
+            elif len(sources) > 1:
+                # Ambiguous - multiple object files have same CSWTCH name+size
+                source_file = "ambiguous"
+                component = "ambiguous"
+                _LOGGER.debug(
+                    "Ambiguous CSWTCH %s (%d B) found in %d files: %s",
+                    sym_name,
+                    size,
+                    len(sources),
+                    ", ".join(src for src, _ in sources),
+                )
+            else:
+                source_file = "unknown"
+                component = "unknown"
+
+            self._cswtch_symbols.append((sym_name, size, source_file, component))
+
+            # Re-attribute from "other" to the correct component
+            if (
+                component not in ("other", "unknown", "ambiguous")
+                and other_mem is not None
+            ):
+                other_mem.add_section_size(section_name, -size)
+                if component not in self.components:
+                    self.components[component] = ComponentMemory(component)
+                self.components[component].add_section_size(section_name, size)
+
+        # Sort by size descending
+        self._cswtch_symbols.sort(key=lambda x: x[1], reverse=True)
+
+        total_size = sum(size for _, size, _, _ in self._cswtch_symbols)
+        _LOGGER.debug(
+            "CSWTCH analysis: %d symbols, %d bytes total",
+            len(self._cswtch_symbols),
+            total_size,
+        )
+
     def get_unattributed_ram(self) -> tuple[int, int, int]:
         """Get unattributed RAM sizes (SDK/framework overhead.

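As a standalone illustration of the nm output parsing used in _scan_cswtch_in_objects, fed with a made-up ``nm --print-file-name -S`` line:

line = ".pioenvs/node/src/esphome/components/wifi/wifi_component.cpp.o:0000000000000000 0000000000000024 r CSWTCH$123"
file_path, rest = line.rsplit(":", 1)    # split off the object file path
address, size_hex, sym_type, name = rest.split()[:4]
size = int(size_hex, 16)                 # nm reports the size in hex -> 36 bytes
print(file_path, name, size)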
@@ -4,6 +4,8 @@ from __future__ import annotations

 from collections import defaultdict
 from collections.abc import Callable
+import heapq
+from operator import itemgetter
 import sys
 from typing import TYPE_CHECKING

@@ -29,6 +31,10 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
     )
     # Lower threshold for RAM symbols (RAM is more constrained)
     RAM_SYMBOL_SIZE_THRESHOLD: int = 24
+    # Number of top symbols to show in the largest symbols report
+    TOP_SYMBOLS_LIMIT: int = 30
+    # Width for symbol name display in top symbols report
+    COL_TOP_SYMBOL_NAME: int = 55

     # Column width constants
     COL_COMPONENT: int = 29
@@ -147,6 +153,83 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
         section_label = f" [{section[1:]}]"  # .data -> [data], .bss -> [bss]
         return f"{demangled} ({size:,} B){section_label}"

+    def _add_top_symbols(self, lines: list[str]) -> None:
+        """Add a section showing the top largest symbols in the binary."""
+        # Collect all symbols from all components: (symbol, demangled, size, section, component)
+        all_symbols = [
+            (symbol, demangled, size, section, component)
+            for component, symbols in self._component_symbols.items()
+            for symbol, demangled, size, section in symbols
+        ]
+
+        # Get top N symbols by size using heapq for efficiency
+        top_symbols = heapq.nlargest(
+            self.TOP_SYMBOLS_LIMIT, all_symbols, key=itemgetter(2)
+        )
+
+        lines.append("")
+        lines.append(f"Top {self.TOP_SYMBOLS_LIMIT} Largest Symbols:")
+        # Calculate truncation limit from column width (leaving room for "...")
+        truncate_limit = self.COL_TOP_SYMBOL_NAME - 3
+        for i, (_, demangled, size, section, component) in enumerate(top_symbols):
+            # Format section label
+            section_label = f"[{section[1:]}]" if section else ""
+            # Truncate demangled name if too long
+            demangled_display = (
+                f"{demangled[:truncate_limit]}..."
+                if len(demangled) > self.COL_TOP_SYMBOL_NAME
+                else demangled
+            )
+            lines.append(
+                f"{i + 1:>2}. {size:>7,} B {section_label:<8} {demangled_display:<{self.COL_TOP_SYMBOL_NAME}} {component}"
+            )
+
+    def _add_cswtch_analysis(self, lines: list[str]) -> None:
+        """Add CSWTCH (GCC switch table lookup) analysis section."""
+        self._add_section_header(lines, "CSWTCH Analysis (GCC Switch Table Lookups)")
+
+        total_size = sum(size for _, size, _, _ in self._cswtch_symbols)
+        lines.append(
+            f"Total: {len(self._cswtch_symbols)} switch table(s), {total_size:,} B"
+        )
+        lines.append("")
+
+        # Group by component
+        by_component: dict[str, list[tuple[str, int, str]]] = defaultdict(list)
+        for sym_name, size, source_file, component in self._cswtch_symbols:
+            by_component[component].append((sym_name, size, source_file))
+
+        # Sort components by total size descending
+        sorted_components = sorted(
+            by_component.items(),
+            key=lambda x: sum(s[1] for s in x[1]),
+            reverse=True,
+        )
+
+        for component, symbols in sorted_components:
+            comp_total = sum(s[1] for s in symbols)
+            lines.append(f"{component} ({comp_total:,} B, {len(symbols)} tables):")
+
+            # Group by source file within component
+            by_file: dict[str, list[tuple[str, int]]] = defaultdict(list)
+            for sym_name, size, source_file in symbols:
+                by_file[source_file].append((sym_name, size))
+
+            for source_file, file_symbols in sorted(
+                by_file.items(),
+                key=lambda x: sum(s[1] for s in x[1]),
+                reverse=True,
+            ):
+                file_total = sum(s[1] for s in file_symbols)
+                lines.append(
+                    f"  {source_file} ({file_total:,} B, {len(file_symbols)} tables)"
+                )
+                for sym_name, size in sorted(
+                    file_symbols, key=lambda x: x[1], reverse=True
+                ):
+                    lines.append(f"    {size:>6,} B  {sym_name}")
+            lines.append("")
+
     def generate_report(self, detailed: bool = False) -> str:
         """Generate a formatted memory report."""
         components = sorted(
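The top-N selection above is plain heapq.nlargest with an itemgetter key; a tiny self-contained illustration with dummy symbol tuples:

import heapq
from operator import itemgetter

symbols = [("sym_a", "a()", 120, ".text"), ("sym_b", "b()", 4096, ".rodata"), ("sym_c", "c()", 512, ".bss")]
top = heapq.nlargest(2, symbols, key=itemgetter(2))  # two largest entries by size (index 2)
print([s[0] for s in top])  # ['sym_b', 'sym_c']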
@@ -248,6 +331,9 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
             "RAM",
         )

+        # Top largest symbols in the binary
+        self._add_top_symbols(lines)
+
         # Add ESPHome core detailed analysis if there are core symbols
         if self._esphome_core_symbols:
             self._add_section_header(lines, f"{_COMPONENT_CORE} Detailed Analysis")
@@ -431,6 +517,10 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
             lines.append(f"  ... and {len(large_ram_syms) - 10} more")
             lines.append("")

+        # CSWTCH (GCC switch table) analysis
+        if self._cswtch_symbols:
+            self._add_cswtch_analysis(lines)
+
         lines.append(
             "Note: This analysis covers symbols in the ELF file. Some runtime allocations may not be included."
         )
@@ -9,20 +9,61 @@ ESPHOME_COMPONENT_PATTERN = re.compile(r"esphome::([a-zA-Z0-9_]+)::")
 # Maps standard section names to their various platform-specific variants
 # Note: Order matters! More specific patterns (.bss) must come before general ones (.dram)
 # because ESP-IDF uses names like ".dram0.bss" which would match ".dram" otherwise
+#
+# Platform-specific sections:
+# - ESP8266/ESP32: .iram*, .dram*
+# - LibreTiny RTL87xx: .xip.code_* (flash), .ram.code_* (RAM)
+# - LibreTiny BK7231: .itcm.code (fast RAM), .vectors (interrupt vectors)
+# - LibreTiny LN882X: .flash_text, .flash_copy* (flash code)
+# - Zephyr/nRF52: text, rodata, datas, bss (no leading dots)
 SECTION_MAPPING = {
-    ".text": frozenset([".text", ".iram"]),
-    ".rodata": frozenset([".rodata"]),
-    ".bss": frozenset([".bss"]),  # Must be before .data to catch ".dram0.bss"
-    ".data": frozenset([".data", ".dram"]),
-}
-
-# Section to ComponentMemory attribute mapping
-# Maps section names to the attribute name in ComponentMemory dataclass
-SECTION_TO_ATTR = {
-    ".text": "text_size",
-    ".rodata": "rodata_size",
-    ".data": "data_size",
-    ".bss": "bss_size",
+    ".text": frozenset(
+        [
+            ".text",
+            ".iram",
+            # LibreTiny RTL87xx XIP (eXecute In Place) flash code
+            ".xip.code",
+            # LibreTiny RTL87xx RAM code
+            ".ram.code_text",
+            # LibreTiny BK7231 fast RAM code and vectors
+            ".itcm.code",
+            ".vectors",
+            # LibreTiny LN882X flash code
+            ".flash_text",
+            ".flash_copy",
+            # Zephyr/nRF52 sections (no leading dots)
+            "text",
+            "rom_start",
+        ]
+    ),
+    ".rodata": frozenset(
+        [
+            ".rodata",
+            # LibreTiny RTL87xx read-only data in RAM
+            ".ram.code_rodata",
+            # Zephyr/nRF52 sections (no leading dots)
+            "rodata",
+        ]
+    ),
+    # .bss patterns - must be before .data to catch ".dram0.bss"
+    ".bss": frozenset(
+        [
+            ".bss",
+            # LibreTiny LN882X BSS
+            ".bss_ram",
+            # Zephyr/nRF52 sections (no leading dots)
+            "bss",
+            "noinit",
+        ]
+    ),
+    ".data": frozenset(
+        [
+            ".data",
+            ".dram",
+            # Zephyr/nRF52 sections (no leading dots)
+            "datas",
+        ]
+    ),
 }

 # Component identification rules
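A simplified sketch of how a raw section name resolves against SECTION_MAPPING (the real lookup lives in map_section_name; the substring matching and first-match-wins behaviour below are assumptions based on the ordering note above):

SECTION_MAPPING_SKETCH = {
    ".text": frozenset([".text", ".iram"]),
    ".rodata": frozenset([".rodata", "rodata"]),
    ".bss": frozenset([".bss", "bss", "noinit"]),  # checked before .data
    ".data": frozenset([".data", ".dram", "datas"]),
}

def map_section_name_sketch(raw: str) -> str | None:
    # Assumption: first standard section whose variant appears in the raw name wins.
    for standard, variants in SECTION_MAPPING_SKETCH.items():
        if any(v in raw for v in variants):
            return standard
    return None

print(map_section_name_sketch(".dram0.bss"))  # ".bss", because .bss is checked before .data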
@@ -463,7 +504,9 @@ SYMBOL_PATTERNS = {
         "__FUNCTION__$",
         "DAYS_IN_MONTH",
         "_DAYS_BEFORE_MONTH",
-        "CSWTCH$",
+        # Note: CSWTCH$ symbols are GCC switch table lookup tables.
+        # They are attributed to their source object files via _analyze_cswtch_symbols()
+        # rather than being lumped into libc.
         "dst$",
         "sulp",
         "_strtol_l",  # String to long with locale
@@ -94,13 +94,13 @@ def parse_symbol_line(line: str) -> tuple[str, str, int, str] | None:
         return None

     # Find section, size, and name
+    # Try each part as a potential section name
     for i, part in enumerate(parts):
-        if not part.startswith("."):
-            continue
+        # Skip parts that are clearly flags, addresses, or other metadata
+        # Sections start with '.' (standard ELF) or are known section names (Zephyr)
         section = map_section_name(part)
         if not section:
-            break
+            continue

         # Need at least size field after section
         if i + 1 >= len(parts):
@@ -3,6 +3,7 @@
 from __future__ import annotations

 import logging
+import os
 from pathlib import Path
 import subprocess
 from typing import TYPE_CHECKING
@@ -17,10 +18,82 @@ TOOLCHAIN_PREFIXES = [
     "xtensa-lx106-elf-",  # ESP8266
     "xtensa-esp32-elf-",  # ESP32
     "xtensa-esp-elf-",  # ESP32 (newer IDF)
+    "arm-zephyr-eabi-",  # nRF52/Zephyr SDK
+    "arm-none-eabi-",  # Generic ARM (RP2040, etc.)
     "",  # System default (no prefix)
 ]


+def _find_in_platformio_packages(tool_name: str) -> str | None:
+    """Search for a tool in PlatformIO package directories.
+
+    This handles cases like Zephyr SDK where tools are installed in nested
+    directories that aren't in PATH.
+
+    Args:
+        tool_name: Name of the tool (e.g., "readelf", "objdump")
+
+    Returns:
+        Full path to the tool or None if not found
+    """
+    # Get PlatformIO packages directory
+    platformio_home = Path(os.path.expanduser("~/.platformio/packages"))
+    if not platformio_home.exists():
+        return None
+
+    # Search patterns for toolchains that might contain the tool
+    # Order matters - more specific patterns first
+    search_patterns = [
+        # Zephyr SDK deeply nested structure (4 levels)
+        # e.g., toolchain-gccarmnoneeabi/zephyr-sdk-0.17.4/arm-zephyr-eabi/bin/arm-zephyr-eabi-objdump
+        f"toolchain-*/*/*/bin/*-{tool_name}",
+        # Zephyr SDK nested structure (3 levels)
+        f"toolchain-*/*/bin/*-{tool_name}",
+        f"toolchain-*/bin/*-{tool_name}",
+        # Standard PlatformIO toolchain structure
+        f"toolchain-*/bin/*{tool_name}",
+    ]
+
+    for pattern in search_patterns:
+        matches = list(platformio_home.glob(pattern))
+        if matches:
+            # Sort to get consistent results, prefer arm-zephyr-eabi over arm-none-eabi
+            matches.sort(key=lambda p: ("zephyr" not in str(p), str(p)))
+            tool_path = str(matches[0])
+            _LOGGER.debug("Found %s in PlatformIO packages: %s", tool_name, tool_path)
+            return tool_path
+
+    return None
+
+
+def resolve_tool_path(
+    tool_name: str,
+    derived_path: str | None,
+    objdump_path: str | None = None,
+) -> str | None:
+    """Resolve a tool path, falling back to find_tool if derived path doesn't exist.
+
+    Args:
+        tool_name: Name of the tool (e.g., "objdump", "readelf")
+        derived_path: Path derived from idedata (may not exist for some platforms)
+        objdump_path: Path to objdump binary to derive other tool paths from
+
+    Returns:
+        Resolved path to the tool, or the original derived_path if it exists
+    """
+    if derived_path and not Path(derived_path).exists():
+        found = find_tool(tool_name, objdump_path)
+        if found:
+            _LOGGER.debug(
+                "Derived %s path %s not found, using %s",
+                tool_name,
+                derived_path,
+                found,
+            )
+            return found
+    return derived_path
+
+
 def find_tool(
     tool_name: str,
     objdump_path: str | None = None,
@@ -28,7 +101,8 @@ def find_tool(
     """Find a toolchain tool by name.

     First tries to derive the tool path from objdump_path (if provided),
-    then falls back to searching for platform-specific tools.
+    then searches PlatformIO package directories (for cross-compile toolchains),
+    and finally falls back to searching for platform-specific tools in PATH.

     Args:
         tool_name: Name of the tool (e.g., "objdump", "nm", "c++filt")
@@ -47,7 +121,13 @@ def find_tool(
             _LOGGER.debug("Found %s at: %s", tool_name, potential_path)
             return potential_path

-    # Try platform-specific tools
+    # Search in PlatformIO packages directory first (handles Zephyr SDK, etc.)
+    # This must come before PATH search because system tools (e.g., /usr/bin/objdump)
+    # are for the host architecture, not the target (ARM, Xtensa, etc.)
+    if found := _find_in_platformio_packages(tool_name):
+        return found
+
+    # Try platform-specific tools in PATH (fallback for when tools are installed globally)
     for prefix in TOOLCHAIN_PREFIXES:
         cmd = f"{prefix}{tool_name}"
         try:
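A hedged sketch of the resolve_tool_path decision used by callers such as MemoryAnalyzer: keep the idedata-derived path when it exists on disk, otherwise ask find_tool for a fallback (the paths and the find callback below are illustrative):

from pathlib import Path

def resolve_sketch(tool_name, derived_path, find):
    # Mirrors resolve_tool_path: only fall back when the derived path is set but missing.
    if derived_path and not Path(derived_path).exists():
        return find(tool_name) or derived_path
    return derived_path

print(resolve_sketch("readelf", "/nonexistent/arm-none-eabi-readelf", lambda t: f"/usr/bin/{t}"))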
esphome/build_gen/espidf.py (new file, 139 lines)
@@ -0,0 +1,139 @@
+"""ESP-IDF direct build generator for ESPHome."""
+
+import json
+from pathlib import Path
+
+from esphome.components.esp32 import get_esp32_variant
+from esphome.core import CORE
+from esphome.helpers import mkdir_p, write_file_if_changed
+
+
+def get_available_components() -> list[str] | None:
+    """Get list of available ESP-IDF components from project_description.json.
+
+    Returns only internal ESP-IDF components, excluding external/managed
+    components (from idf_component.yml).
+    """
+    project_desc = Path(CORE.build_path) / "build" / "project_description.json"
+    if not project_desc.exists():
+        return None
+
+    try:
+        with open(project_desc, encoding="utf-8") as f:
+            data = json.load(f)
+
+        component_info = data.get("build_component_info", {})
+
+        result = []
+        for name, info in component_info.items():
+            # Exclude our own src component
+            if name == "src":
+                continue
+
+            # Exclude managed/external components
+            comp_dir = info.get("dir", "")
+            if "managed_components" in comp_dir:
+                continue
+
+            result.append(name)
+
+        return result
+    except (json.JSONDecodeError, OSError):
+        return None
+
+
+def has_discovered_components() -> bool:
+    """Check if we have discovered components from a previous configure."""
+    return get_available_components() is not None
+
+
+def get_project_cmakelists() -> str:
+    """Generate the top-level CMakeLists.txt for ESP-IDF project."""
+    # Get IDF target from ESP32 variant (e.g., ESP32S3 -> esp32s3)
+    variant = get_esp32_variant()
+    idf_target = variant.lower().replace("-", "")
+
+    return f"""\
+# Auto-generated by ESPHome
+cmake_minimum_required(VERSION 3.16)
+
+set(IDF_TARGET {idf_target})
+set(EXTRA_COMPONENT_DIRS ${{CMAKE_SOURCE_DIR}}/src)
+
+include($ENV{{IDF_PATH}}/tools/cmake/project.cmake)
+project({CORE.name})
+"""
+
+
+def get_component_cmakelists(minimal: bool = False) -> str:
+    """Generate the main component CMakeLists.txt."""
+    idf_requires = [] if minimal else (get_available_components() or [])
+    requires_str = " ".join(idf_requires)
+
+    # Extract compile definitions from build flags (-DXXX -> XXX)
+    compile_defs = [flag[2:] for flag in CORE.build_flags if flag.startswith("-D")]
+    compile_defs_str = "\n    ".join(compile_defs) if compile_defs else ""
+
+    # Extract compile options (-W flags, excluding linker flags)
+    compile_opts = [
+        flag
+        for flag in CORE.build_flags
+        if flag.startswith("-W") and not flag.startswith("-Wl,")
+    ]
+    compile_opts_str = "\n    ".join(compile_opts) if compile_opts else ""
+
+    # Extract linker options (-Wl, flags)
+    link_opts = [flag for flag in CORE.build_flags if flag.startswith("-Wl,")]
+    link_opts_str = "\n    ".join(link_opts) if link_opts else ""
+
+    return f"""\
+# Auto-generated by ESPHome
+file(GLOB_RECURSE app_sources
+    "${{CMAKE_CURRENT_SOURCE_DIR}}/*.cpp"
+    "${{CMAKE_CURRENT_SOURCE_DIR}}/*.c"
+    "${{CMAKE_CURRENT_SOURCE_DIR}}/esphome/*.cpp"
+    "${{CMAKE_CURRENT_SOURCE_DIR}}/esphome/*.c"
+)
+
+idf_component_register(
+    SRCS ${{app_sources}}
+    INCLUDE_DIRS "." "esphome"
+    REQUIRES {requires_str}
+)
+
+# Apply C++ standard
+target_compile_features(${{COMPONENT_LIB}} PUBLIC cxx_std_20)
+
+# ESPHome compile definitions
+target_compile_definitions(${{COMPONENT_LIB}} PUBLIC
+    {compile_defs_str}
+)
+
+# ESPHome compile options
+target_compile_options(${{COMPONENT_LIB}} PUBLIC
+    {compile_opts_str}
+)
+
+# ESPHome linker options
+target_link_options(${{COMPONENT_LIB}} PUBLIC
+    {link_opts_str}
+)
+"""
+
+
+def write_project(minimal: bool = False) -> None:
+    """Write ESP-IDF project files."""
+    mkdir_p(CORE.build_path)
+    mkdir_p(CORE.relative_src_path())
+
+    # Write top-level CMakeLists.txt
+    write_file_if_changed(
+        CORE.relative_build_path("CMakeLists.txt"),
+        get_project_cmakelists(),
+    )
+
+    # Write component CMakeLists.txt in src/
+    write_file_if_changed(
+        CORE.relative_src_path("CMakeLists.txt"),
+        get_component_cmakelists(minimal=minimal),
+    )
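The component CMakeLists generator partitions CORE.build_flags into definitions, compile options, and linker options; the same split can be checked standalone (the flags below are made up):

build_flags = ["-DUSE_ESP32", "-DESPHOME_LOG_LEVEL=5", "-Wno-unused-variable", "-Wl,--gc-sections"]

compile_defs = [flag[2:] for flag in build_flags if flag.startswith("-D")]
compile_opts = [f for f in build_flags if f.startswith("-W") and not f.startswith("-Wl,")]
link_opts = [f for f in build_flags if f.startswith("-Wl,")]

print(compile_defs)  # ['USE_ESP32', 'ESPHOME_LOG_LEVEL=5']
print(compile_opts)  # ['-Wno-unused-variable']
print(link_opts)     # ['-Wl,--gc-sections']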
@@ -69,6 +69,7 @@ from esphome.cpp_types import (  # noqa: F401
     JsonObjectConst,
     Parented,
     PollingComponent,
+    StringRef,
     arduino_json_ns,
     bool_,
     const_char_ptr,
@@ -86,6 +87,7 @@ from esphome.cpp_types import (  # noqa: F401
     size_t,
     std_ns,
     std_shared_ptr,
+    std_span,
     std_string,
     std_string_ref,
     std_vector,
@@ -45,8 +45,6 @@ void AbsoluteHumidityComponent::dump_config() {
                 this->temperature_sensor_->get_name().c_str(), this->humidity_sensor_->get_name().c_str());
 }

-float AbsoluteHumidityComponent::get_setup_priority() const { return setup_priority::DATA; }
-
 void AbsoluteHumidityComponent::loop() {
   if (!this->next_update_) {
     return;
@@ -24,7 +24,6 @@ class AbsoluteHumidityComponent : public sensor::Sensor, public Component {

   void setup() override;
   void dump_config() override;
-  float get_setup_priority() const override;
   void loop() override;

  protected:
@@ -68,11 +68,6 @@ class ADCSensor : public sensor::Sensor, public PollingComponent, public voltage
   /// This method is called during the ESPHome setup process to log the configuration.
   void dump_config() override;

-  /// Return the setup priority for this component.
-  /// Components with higher priority are initialized earlier during setup.
-  /// @return A float representing the setup priority.
-  float get_setup_priority() const override;
-
 #ifdef USE_ZEPHYR
   /// Set the ADC channel to be used by the ADC sensor.
   /// @param channel Pointer to an adc_dt_spec structure representing the ADC channel.
@@ -79,7 +79,5 @@ void ADCSensor::set_sample_count(uint8_t sample_count) {

 void ADCSensor::set_sampling_mode(SamplingMode sampling_mode) { this->sampling_mode_ = sampling_mode; }

-float ADCSensor::get_setup_priority() const { return setup_priority::DATA; }
-
 }  // namespace adc
 }  // namespace esphome
@@ -42,11 +42,11 @@ void ADCSensor::setup() {
     adc_oneshot_unit_init_cfg_t init_config = {};  // Zero initialize
     init_config.unit_id = this->adc_unit_;
     init_config.ulp_mode = ADC_ULP_MODE_DISABLE;
-#if USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 || \
-    USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2
+#if USE_ESP32_VARIANT_ESP32C2 || USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || \
+    USE_ESP32_VARIANT_ESP32C6 || USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2
     init_config.clk_src = ADC_DIGI_CLK_SRC_DEFAULT;
-#endif  // USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 ||
-        // USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2
+#endif  // USE_ESP32_VARIANT_ESP32C2 || USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 ||
+        // USE_ESP32_VARIANT_ESP32C6 || USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2
     esp_err_t err = adc_oneshot_new_unit(&init_config, &ADCSensor::shared_adc_handles[this->adc_unit_]);
     if (err != ESP_OK) {
       ESP_LOGE(TAG, "Error initializing %s: %d", LOG_STR_ARG(adc_unit_to_str(this->adc_unit_)), err);
@@ -76,7 +76,7 @@ void ADCSensor::setup() {

 #if USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 || \
     USE_ESP32_VARIANT_ESP32C61 || USE_ESP32_VARIANT_ESP32H2 || USE_ESP32_VARIANT_ESP32P4 || USE_ESP32_VARIANT_ESP32S3
-    // RISC-V variants and S3 use curve fitting calibration
+    // RISC-V variants (except C2) and S3 use curve fitting calibration
     adc_cali_curve_fitting_config_t cali_config = {};  // Zero initialize first
 #if ESP_IDF_VERSION >= ESP_IDF_VERSION_VAL(5, 3, 0)
     cali_config.chan = this->channel_;
@@ -94,14 +94,14 @@ void ADCSensor::setup() {
       ESP_LOGW(TAG, "Curve fitting calibration failed with error %d, will use uncalibrated readings", err);
       this->setup_flags_.calibration_complete = false;
     }
-#else  // Other ESP32 variants use line fitting calibration
+#else  // ESP32, ESP32-S2, and ESP32-C2 use line fitting calibration
     adc_cali_line_fitting_config_t cali_config = {
         .unit_id = this->adc_unit_,
         .atten = this->attenuation_,
         .bitwidth = ADC_BITWIDTH_DEFAULT,
-#if !defined(USE_ESP32_VARIANT_ESP32S2)
+#if !defined(USE_ESP32_VARIANT_ESP32S2) && !defined(USE_ESP32_VARIANT_ESP32C2)
         .default_vref = 1100,  // Default reference voltage in mV
-#endif  // !defined(USE_ESP32_VARIANT_ESP32S2)
+#endif  // !defined(USE_ESP32_VARIANT_ESP32S2) && !defined(USE_ESP32_VARIANT_ESP32C2)
     };
     err = adc_cali_create_scheme_line_fitting(&cali_config, &handle);
     if (err == ESP_OK) {
@@ -112,7 +112,7 @@ void ADCSensor::setup() {
      ESP_LOGW(TAG, "Line fitting calibration failed with error %d, will use uncalibrated readings", err);
      this->setup_flags_.calibration_complete = false;
    }
-#endif  // USE_ESP32_VARIANT_ESP32C3 || ESP32C5 || ESP32C6 || ESP32C61 || ESP32H2 || ESP32P4 || ESP32S3
+#endif  // ESP32C3 || ESP32C5 || ESP32C6 || ESP32C61 || ESP32H2 || ESP32P4 || ESP32S3
   }

   this->setup_flags_.init_complete = true;
@@ -189,7 +189,7 @@ float ADCSensor::sample_fixed_attenuation_() {
       adc_cali_delete_scheme_curve_fitting(this->calibration_handle_);
 #else  // Other ESP32 variants use line fitting calibration
       adc_cali_delete_scheme_line_fitting(this->calibration_handle_);
-#endif  // USE_ESP32_VARIANT_ESP32C3 || ESP32C5 || ESP32C6 || ESP32C61 || ESP32H2 || ESP32P4 || ESP32S3
+#endif  // ESP32C3 || ESP32C5 || ESP32C6 || ESP32C61 || ESP32H2 || ESP32P4 || ESP32S3
       this->calibration_handle_ = nullptr;
     }
   }
@@ -247,7 +247,7 @@ float ADCSensor::sample_autorange_() {
         .unit_id = this->adc_unit_,
         .atten = atten,
         .bitwidth = ADC_BITWIDTH_DEFAULT,
-#if !defined(USE_ESP32_VARIANT_ESP32S2)
+#if !defined(USE_ESP32_VARIANT_ESP32S2) && !defined(USE_ESP32_VARIANT_ESP32C2)
         .default_vref = 1100,
 #endif
     };
@@ -2,7 +2,7 @@ import logging

 import esphome.codegen as cg
 from esphome.components import sensor, voltage_sampler
-from esphome.components.esp32 import get_esp32_variant
+from esphome.components.esp32 import get_esp32_variant, include_builtin_idf_component
 from esphome.components.nrf52.const import AIN_TO_GPIO, EXTRA_ADC
 from esphome.components.zephyr import (
     zephyr_add_overlay,
@@ -118,6 +118,9 @@ async def to_code(config):
     cg.add(var.set_sampling_mode(config[CONF_SAMPLING_MODE]))

     if CORE.is_esp32:
+        # Re-enable ESP-IDF's ADC driver (excluded by default to save compile time)
+        include_builtin_idf_component("esp_adc")
+
         if attenuation := config.get(CONF_ATTENUATION):
             if attenuation == "auto":
                 cg.add(var.set_autorange(cg.global_ns.true))
@@ -160,21 +163,21 @@ async def to_code(config):
         zephyr_add_user("io-channels", f"<&adc {channel_id}>")
         zephyr_add_overlay(
             f"""
 &adc {{
     #address-cells = <1>;
     #size-cells = <0>;

     channel@{channel_id} {{
         reg = <{channel_id}>;
         zephyr,gain = "{gain}";
         zephyr,reference = "ADC_REF_INTERNAL";
         zephyr,acquisition-time = <ADC_ACQ_TIME_DEFAULT>;
         zephyr,input-positive = <NRF_SAADC_{pin_number}>;
         zephyr,resolution = <14>;
         zephyr,oversampling = <8>;
     }};
 }};
 """
         )

@@ -9,8 +9,6 @@ static const char *const TAG = "adc128s102.sensor";

 ADC128S102Sensor::ADC128S102Sensor(uint8_t channel) : channel_(channel) {}

-float ADC128S102Sensor::get_setup_priority() const { return setup_priority::DATA; }
-
 void ADC128S102Sensor::dump_config() {
   LOG_SENSOR("", "ADC128S102 Sensor", this);
   ESP_LOGCONFIG(TAG, "  Pin: %u", this->channel_);
@@ -19,7 +19,6 @@ class ADC128S102Sensor : public PollingComponent,

   void update() override;
   void dump_config() override;
-  float get_setup_priority() const override;
   float sample() override;

  protected:
@@ -150,8 +150,6 @@ void AHT10Component::update() {
   this->restart_read_();
 }

-float AHT10Component::get_setup_priority() const { return setup_priority::DATA; }
-
 void AHT10Component::dump_config() {
   ESP_LOGCONFIG(TAG, "AHT10:");
   LOG_I2C_DEVICE(this);
@@ -16,7 +16,6 @@ class AHT10Component : public PollingComponent, public i2c::I2CDevice {
   void setup() override;
   void update() override;
   void dump_config() override;
-  float get_setup_priority() const override;
   void set_variant(AHT10Variant variant) { this->variant_ = variant; }

   void set_temperature_sensor(sensor::Sensor *temperature_sensor) { temperature_sensor_ = temperature_sensor; }
@@ -67,52 +67,29 @@ void AlarmControlPanel::add_on_ready_callback(std::function<void()> &&callback)
   this->ready_callback_.add(std::move(callback));
 }
 
-void AlarmControlPanel::arm_away(optional<std::string> code) {
+void AlarmControlPanel::arm_with_code_(AlarmControlPanelCall &(AlarmControlPanelCall::*arm_method)(),
+                                       const char *code) {
   auto call = this->make_call();
-  call.arm_away();
-  if (code.has_value())
-    call.set_code(code.value());
+  (call.*arm_method)();
+  if (code != nullptr)
+    call.set_code(code);
   call.perform();
 }
 
-void AlarmControlPanel::arm_home(optional<std::string> code) {
-  auto call = this->make_call();
-  call.arm_home();
-  if (code.has_value())
-    call.set_code(code.value());
-  call.perform();
-}
-
-void AlarmControlPanel::arm_night(optional<std::string> code) {
-  auto call = this->make_call();
-  call.arm_night();
-  if (code.has_value())
-    call.set_code(code.value());
-  call.perform();
-}
-
-void AlarmControlPanel::arm_vacation(optional<std::string> code) {
-  auto call = this->make_call();
-  call.arm_vacation();
-  if (code.has_value())
-    call.set_code(code.value());
-  call.perform();
-}
-
-void AlarmControlPanel::arm_custom_bypass(optional<std::string> code) {
-  auto call = this->make_call();
-  call.arm_custom_bypass();
-  if (code.has_value())
-    call.set_code(code.value());
-  call.perform();
-}
-
-void AlarmControlPanel::disarm(optional<std::string> code) {
-  auto call = this->make_call();
-  call.disarm();
-  if (code.has_value())
-    call.set_code(code.value());
-  call.perform();
-}
+void AlarmControlPanel::arm_away(const char *code) { this->arm_with_code_(&AlarmControlPanelCall::arm_away, code); }
+
+void AlarmControlPanel::arm_home(const char *code) { this->arm_with_code_(&AlarmControlPanelCall::arm_home, code); }
+
+void AlarmControlPanel::arm_night(const char *code) { this->arm_with_code_(&AlarmControlPanelCall::arm_night, code); }
+
+void AlarmControlPanel::arm_vacation(const char *code) {
+  this->arm_with_code_(&AlarmControlPanelCall::arm_vacation, code);
+}
+
+void AlarmControlPanel::arm_custom_bypass(const char *code) {
+  this->arm_with_code_(&AlarmControlPanelCall::arm_custom_bypass, code);
+}
+
+void AlarmControlPanel::disarm(const char *code) { this->arm_with_code_(&AlarmControlPanelCall::disarm, code); }
 
 }  // namespace esphome::alarm_control_panel

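A short sketch of the pointer-to-member-function pattern used by `arm_with_code_` above, showing how one builder method of `AlarmControlPanelCall` is selected at runtime. This is illustrative only; `panel` is a hypothetical pointer and the include path is assumed.

```cpp
#include "esphome/components/alarm_control_panel/alarm_control_panel.h"  // assumed path

using esphome::alarm_control_panel::AlarmControlPanel;
using esphome::alarm_control_panel::AlarmControlPanelCall;

void arm_selected_mode(AlarmControlPanel *panel, bool night, const char *code) {
  // Pick the builder method at runtime; all arm_* builders share this signature.
  AlarmControlPanelCall &(AlarmControlPanelCall::*arm_method)() =
      night ? &AlarmControlPanelCall::arm_night : &AlarmControlPanelCall::arm_away;
  auto call = panel->make_call();
  (call.*arm_method)();   // equivalent to call.arm_night() or call.arm_away()
  if (code != nullptr)
    call.set_code(code);  // code is optional
  call.perform();
}
```
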
@@ -76,37 +76,53 @@ class AlarmControlPanel : public EntityBase {
    *
    * @param code The code
    */
-  void arm_away(optional<std::string> code = nullopt);
+  void arm_away(const char *code = nullptr);
+  void arm_away(const optional<std::string> &code) {
+    this->arm_away(code.has_value() ? code.value().c_str() : nullptr);
+  }
 
   /** arm the alarm in home mode
    *
    * @param code The code
    */
-  void arm_home(optional<std::string> code = nullopt);
+  void arm_home(const char *code = nullptr);
+  void arm_home(const optional<std::string> &code) {
+    this->arm_home(code.has_value() ? code.value().c_str() : nullptr);
+  }
 
   /** arm the alarm in night mode
    *
    * @param code The code
    */
-  void arm_night(optional<std::string> code = nullopt);
+  void arm_night(const char *code = nullptr);
+  void arm_night(const optional<std::string> &code) {
+    this->arm_night(code.has_value() ? code.value().c_str() : nullptr);
+  }
 
   /** arm the alarm in vacation mode
    *
    * @param code The code
    */
-  void arm_vacation(optional<std::string> code = nullopt);
+  void arm_vacation(const char *code = nullptr);
+  void arm_vacation(const optional<std::string> &code) {
+    this->arm_vacation(code.has_value() ? code.value().c_str() : nullptr);
+  }
 
   /** arm the alarm in custom bypass mode
    *
    * @param code The code
    */
-  void arm_custom_bypass(optional<std::string> code = nullopt);
+  void arm_custom_bypass(const char *code = nullptr);
+  void arm_custom_bypass(const optional<std::string> &code) {
+    this->arm_custom_bypass(code.has_value() ? code.value().c_str() : nullptr);
+  }
 
   /** disarm the alarm
    *
    * @param code The code
    */
-  void disarm(optional<std::string> code = nullopt);
+  void disarm(const char *code = nullptr);
+  void disarm(const optional<std::string> &code) { this->disarm(code.has_value() ? code.value().c_str() : nullptr); }
 
   /** Get the state
    *

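Illustrative call sites, not part of the diff: both overload sets declared above funnel into the `const char *` versions, so callers holding an `optional<std::string>` keep compiling while new callers can pass a literal without building a `std::string`. `panel` is a hypothetical pointer; the alarm_control_panel and `esphome/core/optional.h` headers are assumed to be included.

```cpp
void example_calls(esphome::alarm_control_panel::AlarmControlPanel *panel,
                   const esphome::optional<std::string> &stored_code) {
  panel->arm_home("1234");     // const char * overload, no std::string constructed
  panel->arm_away();           // defaulted nullptr: arm without a code
  panel->disarm(stored_code);  // optional overload forwards c_str() or nullptr
}
```
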
@@ -118,6 +134,8 @@ class AlarmControlPanel : public EntityBase {
 
  protected:
   friend AlarmControlPanelCall;
+  // Helper to reduce code duplication for arm/disarm methods
+  void arm_with_code_(AlarmControlPanelCall &(AlarmControlPanelCall::*arm_method)(), const char *code);
   // in order to store last panel state in flash
   ESPPreferenceObject pref_;
   // current state

@@ -10,8 +10,10 @@ static const char *const TAG = "alarm_control_panel";
 
 AlarmControlPanelCall::AlarmControlPanelCall(AlarmControlPanel *parent) : parent_(parent) {}
 
-AlarmControlPanelCall &AlarmControlPanelCall::set_code(const std::string &code) {
-  this->code_ = code;
+AlarmControlPanelCall &AlarmControlPanelCall::set_code(const char *code) {
+  if (code != nullptr) {
+    this->code_ = std::string(code);
+  }
   return *this;
 }
 

@@ -14,7 +14,8 @@ class AlarmControlPanelCall {
  public:
   AlarmControlPanelCall(AlarmControlPanel *parent);
 
-  AlarmControlPanelCall &set_code(const std::string &code);
+  AlarmControlPanelCall &set_code(const char *code);
+  AlarmControlPanelCall &set_code(const std::string &code) { return this->set_code(code.c_str()); }
   AlarmControlPanelCall &arm_away();
   AlarmControlPanelCall &arm_home();
   AlarmControlPanelCall &arm_night();

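A small usage sketch (not part of the diff): because `set_code(const char *)` stores the code only when the pointer is non-null, a possibly-missing code can be forwarded without branching at the call site. `panel` is hypothetical.

```cpp
void arm_custom(esphome::alarm_control_panel::AlarmControlPanel *panel, const char *maybe_code) {
  auto call = panel->make_call();
  call.arm_custom_bypass();
  call.set_code(maybe_code);  // no-op when maybe_code == nullptr
  call.perform();
}
```
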
@@ -1,32 +1,15 @@
 #include "alarm_control_panel_state.h"
+#include "esphome/core/progmem.h"
 
 namespace esphome::alarm_control_panel {
 
+// Alarm control panel state strings indexed by AlarmControlPanelState enum (0-9)
+PROGMEM_STRING_TABLE(AlarmControlPanelStateStrings, "DISARMED", "ARMED_HOME", "ARMED_AWAY", "ARMED_NIGHT",
+                     "ARMED_VACATION", "ARMED_CUSTOM_BYPASS", "PENDING", "ARMING", "DISARMING", "TRIGGERED", "UNKNOWN");
+
 const LogString *alarm_control_panel_state_to_string(AlarmControlPanelState state) {
-  switch (state) {
-    case ACP_STATE_DISARMED:
-      return LOG_STR("DISARMED");
-    case ACP_STATE_ARMED_HOME:
-      return LOG_STR("ARMED_HOME");
-    case ACP_STATE_ARMED_AWAY:
-      return LOG_STR("ARMED_AWAY");
-    case ACP_STATE_ARMED_NIGHT:
-      return LOG_STR("ARMED_NIGHT");
-    case ACP_STATE_ARMED_VACATION:
-      return LOG_STR("ARMED_VACATION");
-    case ACP_STATE_ARMED_CUSTOM_BYPASS:
-      return LOG_STR("ARMED_CUSTOM_BYPASS");
-    case ACP_STATE_PENDING:
-      return LOG_STR("PENDING");
-    case ACP_STATE_ARMING:
-      return LOG_STR("ARMING");
-    case ACP_STATE_DISARMING:
-      return LOG_STR("DISARMING");
-    case ACP_STATE_TRIGGERED:
-      return LOG_STR("TRIGGERED");
-    default:
-      return LOG_STR("UNKNOWN");
-  }
+  return AlarmControlPanelStateStrings::get_log_str(static_cast<uint8_t>(state),
+                                                    AlarmControlPanelStateStrings::LAST_INDEX);
 }
 
 }  // namespace esphome::alarm_control_panel

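The `PROGMEM_STRING_TABLE` macro used above is not expanded here. As an illustration only, a plain (non-PROGMEM) equivalent of the bounds-clamped lookup that replaces the switch could look like the sketch below; all names are hypothetical.

```cpp
#include <cstdint>

static const char *const STATE_NAMES[] = {"DISARMED",       "ARMED_HOME",          "ARMED_AWAY", "ARMED_NIGHT",
                                          "ARMED_VACATION", "ARMED_CUSTOM_BYPASS", "PENDING",    "ARMING",
                                          "DISARMING",      "TRIGGERED",           "UNKNOWN"};

const char *state_name(uint8_t state) {
  constexpr uint8_t LAST_INDEX = (sizeof(STATE_NAMES) / sizeof(STATE_NAMES[0])) - 1;
  // Out-of-range values clamp to the final "UNKNOWN" entry.
  return STATE_NAMES[state <= LAST_INDEX ? state : LAST_INDEX];
}
```
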
@@ -66,15 +66,7 @@ template<typename... Ts> class ArmAwayAction : public Action<Ts...> {
 
   TEMPLATABLE_VALUE(std::string, code)
 
-  void play(const Ts &...x) override {
-    auto call = this->alarm_control_panel_->make_call();
-    auto code = this->code_.optional_value(x...);
-    if (code.has_value()) {
-      call.set_code(code.value());
-    }
-    call.arm_away();
-    call.perform();
-  }
+  void play(const Ts &...x) override { this->alarm_control_panel_->arm_away(this->code_.optional_value(x...)); }
 
  protected:
   AlarmControlPanel *alarm_control_panel_;

@@ -86,15 +78,7 @@ template<typename... Ts> class ArmHomeAction : public Action<Ts...> {
 
   TEMPLATABLE_VALUE(std::string, code)
 
-  void play(const Ts &...x) override {
-    auto call = this->alarm_control_panel_->make_call();
-    auto code = this->code_.optional_value(x...);
-    if (code.has_value()) {
-      call.set_code(code.value());
-    }
-    call.arm_home();
-    call.perform();
-  }
+  void play(const Ts &...x) override { this->alarm_control_panel_->arm_home(this->code_.optional_value(x...)); }
 
  protected:
   AlarmControlPanel *alarm_control_panel_;

@@ -106,15 +90,7 @@ template<typename... Ts> class ArmNightAction : public Action<Ts...> {
 
   TEMPLATABLE_VALUE(std::string, code)
 
-  void play(const Ts &...x) override {
-    auto call = this->alarm_control_panel_->make_call();
-    auto code = this->code_.optional_value(x...);
-    if (code.has_value()) {
-      call.set_code(code.value());
-    }
-    call.arm_night();
-    call.perform();
-  }
+  void play(const Ts &...x) override { this->alarm_control_panel_->arm_night(this->code_.optional_value(x...)); }
 
  protected:
   AlarmControlPanel *alarm_control_panel_;

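A sketch of the shape the simplified actions above now share: forward the templatable code straight into the panel overload instead of assembling an `AlarmControlPanelCall` by hand. `ArmModeActionSketch` is a hypothetical stand-in, not esphome code, and assumes the same includes and namespace as the automation header shown above.

```cpp
template<typename... Ts> class ArmModeActionSketch : public Action<Ts...> {
 public:
  explicit ArmModeActionSketch(AlarmControlPanel *alarm_control_panel) : alarm_control_panel_(alarm_control_panel) {}
  TEMPLATABLE_VALUE(std::string, code)

  void play(const Ts &...x) override {
    // optional_value() yields an optional<std::string>, matching the new overloads.
    this->alarm_control_panel_->arm_vacation(this->code_.optional_value(x...));
  }

 protected:
  AlarmControlPanel *alarm_control_panel_;
};
```
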
@@ -176,7 +176,5 @@ void AM2315C::dump_config() {
   LOG_SENSOR("  ", "Humidity", this->humidity_sensor_);
 }
 
-float AM2315C::get_setup_priority() const { return setup_priority::DATA; }
-
 }  // namespace am2315c
 }  // namespace esphome

@@ -33,7 +33,6 @@ class AM2315C : public PollingComponent, public i2c::I2CDevice {
   void dump_config() override;
   void update() override;
   void setup() override;
-  float get_setup_priority() const override;
 
   void set_temperature_sensor(sensor::Sensor *temperature_sensor) { this->temperature_sensor_ = temperature_sensor; }
   void set_humidity_sensor(sensor::Sensor *humidity_sensor) { this->humidity_sensor_ = humidity_sensor; }

@@ -51,7 +51,6 @@ void AM2320Component::dump_config() {
   LOG_SENSOR("  ", "Temperature", this->temperature_sensor_);
   LOG_SENSOR("  ", "Humidity", this->humidity_sensor_);
 }
-float AM2320Component::get_setup_priority() const { return setup_priority::DATA; }
 
 bool AM2320Component::read_bytes_(uint8_t a_register, uint8_t *data, uint8_t len, uint32_t conversion) {
   if (!this->write_bytes(a_register, data, 2)) {

@@ -11,7 +11,6 @@ class AM2320Component : public PollingComponent, public i2c::I2CDevice {
  public:
   void setup() override;
   void dump_config() override;
-  float get_setup_priority() const override;
   void update() override;
 
   void set_temperature_sensor(sensor::Sensor *temperature_sensor) { temperature_sensor_ = temperature_sensor; }

@@ -1,21 +1,12 @@
 #include "am43_base.h"
+#include "esphome/core/helpers.h"
 #include <cstring>
-#include <cstdio>
 
 namespace esphome {
 namespace am43 {
 
 const uint8_t START_PACKET[5] = {0x00, 0xff, 0x00, 0x00, 0x9a};
 
-std::string pkt_to_hex(const uint8_t *data, uint16_t len) {
-  char buf[64];
-  memset(buf, 0, 64);
-  for (int i = 0; i < len; i++)
-    sprintf(&buf[i * 2], "%02x", data[i]);
-  std::string ret = buf;
-  return ret;
-}
-
 Am43Packet *Am43Encoder::get_battery_level_request() {
   uint8_t data = 0x1;
   return this->encode_(0xA2, &data, 1);

@@ -73,7 +64,9 @@ Am43Packet *Am43Encoder::encode_(uint8_t command, uint8_t *data, uint8_t length)
   memcpy(&this->packet_.data[7], data, length);
   this->packet_.length = length + 7;
   this->checksum_();
-  ESP_LOGV("am43", "ENC(%d): 0x%s", packet_.length, pkt_to_hex(packet_.data, packet_.length).c_str());
+  char hex_buf[format_hex_size(sizeof(this->packet_.data))];
+  ESP_LOGV("am43", "ENC(%d): 0x%s", this->packet_.length,
+           format_hex_to(hex_buf, this->packet_.data, this->packet_.length));
   return &this->packet_;
 }
 

@@ -88,7 +81,8 @@ void Am43Decoder::decode(const uint8_t *data, uint16_t length) {
   this->has_set_state_response_ = false;
   this->has_position_ = false;
   this->has_pin_response_ = false;
-  ESP_LOGV("am43", "DEC(%d): 0x%s", length, pkt_to_hex(data, length).c_str());
+  char hex_buf[format_hex_size(24)];  // Max expected packet size
+  ESP_LOGV("am43", "DEC(%d): 0x%s", length, format_hex_to(hex_buf, data, length));
 
   if (length < 2 || data[0] != 0x9a)
     return;

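The two hunks above swap the heap-allocating `pkt_to_hex()` helper for `format_hex_size()`/`format_hex_to()` from `esphome/core/helpers.h`, formatting into a caller-provided stack buffer. The sketch below shows the general idea only; it is not the actual helper implementation and the names are hypothetical.

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

constexpr size_t hex_buffer_size(size_t n_bytes) { return n_bytes * 2 + 1; }  // 2 chars per byte + NUL

const char *hex_format_to(char *out, const uint8_t *data, size_t len) {
  for (size_t i = 0; i < len; i++)
    snprintf(&out[i * 2], 3, "%02x", data[i]);  // bounded: 2 hex digits + NUL per step
  out[len * 2] = '\0';
  return out;
}

// Usage sketch: char buf[hex_buffer_size(24)]; then log "0x%s" with hex_format_to(buf, data, len).
```
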
@@ -18,31 +18,31 @@ AnovaPacket *AnovaCodec::clean_packet_() {
 
 AnovaPacket *AnovaCodec::get_read_device_status_request() {
   this->current_query_ = READ_DEVICE_STATUS;
-  sprintf((char *) this->packet_.data, "%s", CMD_READ_DEVICE_STATUS);
+  snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_READ_DEVICE_STATUS);
   return this->clean_packet_();
 }
 
 AnovaPacket *AnovaCodec::get_read_target_temp_request() {
   this->current_query_ = READ_TARGET_TEMPERATURE;
-  sprintf((char *) this->packet_.data, "%s", CMD_READ_TARGET_TEMP);
+  snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_READ_TARGET_TEMP);
   return this->clean_packet_();
 }
 
 AnovaPacket *AnovaCodec::get_read_current_temp_request() {
   this->current_query_ = READ_CURRENT_TEMPERATURE;
-  sprintf((char *) this->packet_.data, "%s", CMD_READ_CURRENT_TEMP);
+  snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_READ_CURRENT_TEMP);
   return this->clean_packet_();
 }
 
 AnovaPacket *AnovaCodec::get_read_unit_request() {
   this->current_query_ = READ_UNIT;
-  sprintf((char *) this->packet_.data, "%s", CMD_READ_UNIT);
+  snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_READ_UNIT);
   return this->clean_packet_();
 }
 
 AnovaPacket *AnovaCodec::get_read_data_request() {
   this->current_query_ = READ_DATA;
-  sprintf((char *) this->packet_.data, "%s", CMD_READ_DATA);
+  snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_READ_DATA);
   return this->clean_packet_();
 }
 

@@ -50,25 +50,25 @@ AnovaPacket *AnovaCodec::get_set_target_temp_request(float temperature) {
   this->current_query_ = SET_TARGET_TEMPERATURE;
   if (this->fahrenheit_)
     temperature = ctof(temperature);
-  sprintf((char *) this->packet_.data, CMD_SET_TARGET_TEMP, temperature);
+  snprintf((char *) this->packet_.data, sizeof(this->packet_.data), CMD_SET_TARGET_TEMP, temperature);
   return this->clean_packet_();
 }
 
 AnovaPacket *AnovaCodec::get_set_unit_request(char unit) {
   this->current_query_ = SET_UNIT;
-  sprintf((char *) this->packet_.data, CMD_SET_TEMP_UNIT, unit);
+  snprintf((char *) this->packet_.data, sizeof(this->packet_.data), CMD_SET_TEMP_UNIT, unit);
   return this->clean_packet_();
 }
 
 AnovaPacket *AnovaCodec::get_start_request() {
   this->current_query_ = START;
-  sprintf((char *) this->packet_.data, CMD_START);
+  snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_START);
   return this->clean_packet_();
 }
 
 AnovaPacket *AnovaCodec::get_stop_request() {
   this->current_query_ = STOP;
-  sprintf((char *) this->packet_.data, CMD_STOP);
+  snprintf((char *) this->packet_.data, sizeof(this->packet_.data), "%s", CMD_STOP);
   return this->clean_packet_();
 }
 

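Why the `sprintf` calls above were replaced, as a minimal sketch with hypothetical names: `snprintf` receives the destination capacity, so an over-long command is truncated instead of overflowing the packet buffer, and routing constants through `"%s"` avoids treating a command constant as a format string.

```cpp
#include <cstdio>

struct PacketSketch {
  char data[16];
};

void write_command(PacketSketch &pkt, const char *cmd) {
  // Writes at most sizeof(pkt.data) - 1 characters plus the terminating NUL.
  snprintf(pkt.data, sizeof(pkt.data), "%s", cmd);
}
```
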
@@ -384,7 +384,6 @@ void APDS9960::process_dataset_(int up, int down, int left, int right) {
     }
   }
 }
-float APDS9960::get_setup_priority() const { return setup_priority::DATA; }
 bool APDS9960::is_proximity_enabled_() const {
   return
 #ifdef USE_SENSOR

@@ -32,7 +32,6 @@ class APDS9960 : public PollingComponent, public i2c::I2CDevice {
  public:
   void setup() override;
   void dump_config() override;
-  float get_setup_priority() const override;
   void update() override;
   void loop() override;
 

@@ -45,6 +45,7 @@ service APIConnection {
   rpc time_command (TimeCommandRequest) returns (void) {}
   rpc update_command (UpdateCommandRequest) returns (void) {}
   rpc valve_command (ValveCommandRequest) returns (void) {}
+  rpc water_heater_command (WaterHeaterCommandRequest) returns (void) {}
 
   rpc subscribe_bluetooth_le_advertisements(SubscribeBluetoothLEAdvertisementsRequest) returns (void) {}
   rpc bluetooth_device_request(BluetoothDeviceRequest) returns (void) {}

@@ -300,7 +300,7 @@ void APIConnection::on_disconnect_response(const DisconnectResponse &value) {
 // Encodes a message to the buffer and returns the total number of bytes used,
 // including header and footer overhead. Returns 0 if the message doesn't fit.
 uint16_t APIConnection::encode_message_to_buffer(ProtoMessage &msg, uint8_t message_type, APIConnection *conn,
-                                                 uint32_t remaining_size, bool is_single) {
+                                                 uint32_t remaining_size) {
 #ifdef HAS_PROTO_MESSAGE_DUMP
   // If in log-only mode, just log and return
   if (conn->flags_.log_only_mode) {

@@ -330,12 +330,9 @@ uint16_t APIConnection::encode_message_to_buffer(ProtoMessage &msg, uint8_t mess
   // Get buffer size after allocation (which includes header padding)
   std::vector<uint8_t> &shared_buf = conn->parent_->get_shared_buffer_ref();
 
-  if (is_single || conn->flags_.batch_first_message) {
-    // Single message or first batch message
-    conn->prepare_first_message_buffer(shared_buf, header_padding, total_calculated_size);
-    if (conn->flags_.batch_first_message) {
-      conn->flags_.batch_first_message = false;
-    }
+  if (conn->flags_.batch_first_message) {
+    // First message - buffer already prepared by caller, just clear flag
+    conn->flags_.batch_first_message = false;
   } else {
     // Batch message second or later
     // Add padding for previous message footer + this message header

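A sketch of the control flow that remains after dropping `is_single`, with simplified, hypothetical names (this is not the esphome code): the caller prepares the shared buffer before the first message of a batch, so the encoder only clears the flag, while later messages reserve padding for the previous footer plus the next header.

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

struct FlagsSketch {
  bool batch_first_message;
};

void reserve_for_next_message(std::vector<uint8_t> &shared_buf, FlagsSketch &flags, size_t padding) {
  if (flags.batch_first_message) {
    // First message: buffer already prepared by the caller, just clear the flag.
    flags.batch_first_message = false;
  } else {
    // Second or later message: leave room for footer + header bytes.
    shared_buf.insert(shared_buf.end(), padding, 0);
  }
}
```
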
@@ -365,24 +362,22 @@ bool APIConnection::send_binary_sensor_state(binary_sensor::BinarySensor *binary
                                    BinarySensorStateResponse::ESTIMATED_SIZE);
 }
 
-uint16_t APIConnection::try_send_binary_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-                                                     bool is_single) {
+uint16_t APIConnection::try_send_binary_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
   auto *binary_sensor = static_cast<binary_sensor::BinarySensor *>(entity);
   BinarySensorStateResponse resp;
   resp.state = binary_sensor->state;
   resp.missing_state = !binary_sensor->has_state();
   return fill_and_encode_entity_state(binary_sensor, resp, BinarySensorStateResponse::MESSAGE_TYPE, conn,
-                                      remaining_size, is_single);
+                                      remaining_size);
 }
 
-uint16_t APIConnection::try_send_binary_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-                                                    bool is_single) {
+uint16_t APIConnection::try_send_binary_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
   auto *binary_sensor = static_cast<binary_sensor::BinarySensor *>(entity);
   ListEntitiesBinarySensorResponse msg;
   msg.device_class = binary_sensor->get_device_class_ref();
   msg.is_status_binary_sensor = binary_sensor->is_status_binary_sensor();
   return fill_and_encode_entity_info(binary_sensor, msg, ListEntitiesBinarySensorResponse::MESSAGE_TYPE, conn,
-                                     remaining_size, is_single);
+                                     remaining_size);
 }
 #endif
 

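A sketch, not code from this diff: with `is_single` gone, every `try_send_*` helper above and below shares one signature, so such helpers can sit behind a single function-pointer alias. The names below are hypothetical and only illustrate the uniform shape.

```cpp
#include <cstdint>

class EntityBase;
class APIConnection;

using TrySendFn = uint16_t (*)(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);

struct EntityMessageHandlers {
  TrySendFn try_send_state;  // e.g. a pointer to one of the try_send_*_state helpers
  TrySendFn try_send_info;   // e.g. a pointer to the matching try_send_*_info helper
};
```
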
@@ -390,8 +385,7 @@ uint16_t APIConnection::try_send_binary_sensor_info(EntityBase *entity, APIConne
|
|||||||
bool APIConnection::send_cover_state(cover::Cover *cover) {
|
bool APIConnection::send_cover_state(cover::Cover *cover) {
|
||||||
return this->send_message_smart_(cover, CoverStateResponse::MESSAGE_TYPE, CoverStateResponse::ESTIMATED_SIZE);
|
return this->send_message_smart_(cover, CoverStateResponse::MESSAGE_TYPE, CoverStateResponse::ESTIMATED_SIZE);
|
||||||
}
|
}
|
||||||
uint16_t APIConnection::try_send_cover_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_cover_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *cover = static_cast<cover::Cover *>(entity);
|
auto *cover = static_cast<cover::Cover *>(entity);
|
||||||
CoverStateResponse msg;
|
CoverStateResponse msg;
|
||||||
auto traits = cover->get_traits();
|
auto traits = cover->get_traits();
|
||||||
@@ -399,10 +393,9 @@ uint16_t APIConnection::try_send_cover_state(EntityBase *entity, APIConnection *
|
|||||||
if (traits.get_supports_tilt())
|
if (traits.get_supports_tilt())
|
||||||
msg.tilt = cover->tilt;
|
msg.tilt = cover->tilt;
|
||||||
msg.current_operation = static_cast<enums::CoverOperation>(cover->current_operation);
|
msg.current_operation = static_cast<enums::CoverOperation>(cover->current_operation);
|
||||||
return fill_and_encode_entity_state(cover, msg, CoverStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
|
return fill_and_encode_entity_state(cover, msg, CoverStateResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
}
|
}
|
||||||
uint16_t APIConnection::try_send_cover_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_cover_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *cover = static_cast<cover::Cover *>(entity);
|
auto *cover = static_cast<cover::Cover *>(entity);
|
||||||
ListEntitiesCoverResponse msg;
|
ListEntitiesCoverResponse msg;
|
||||||
auto traits = cover->get_traits();
|
auto traits = cover->get_traits();
|
||||||
@@ -411,8 +404,7 @@ uint16_t APIConnection::try_send_cover_info(EntityBase *entity, APIConnection *c
|
|||||||
msg.supports_tilt = traits.get_supports_tilt();
|
msg.supports_tilt = traits.get_supports_tilt();
|
||||||
msg.supports_stop = traits.get_supports_stop();
|
msg.supports_stop = traits.get_supports_stop();
|
||||||
msg.device_class = cover->get_device_class_ref();
|
msg.device_class = cover->get_device_class_ref();
|
||||||
return fill_and_encode_entity_info(cover, msg, ListEntitiesCoverResponse::MESSAGE_TYPE, conn, remaining_size,
|
return fill_and_encode_entity_info(cover, msg, ListEntitiesCoverResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
is_single);
|
|
||||||
}
|
}
|
||||||
void APIConnection::cover_command(const CoverCommandRequest &msg) {
|
void APIConnection::cover_command(const CoverCommandRequest &msg) {
|
||||||
ENTITY_COMMAND_MAKE_CALL(cover::Cover, cover, cover)
|
ENTITY_COMMAND_MAKE_CALL(cover::Cover, cover, cover)
|
||||||
@@ -430,8 +422,7 @@ void APIConnection::cover_command(const CoverCommandRequest &msg) {
|
|||||||
bool APIConnection::send_fan_state(fan::Fan *fan) {
|
bool APIConnection::send_fan_state(fan::Fan *fan) {
|
||||||
return this->send_message_smart_(fan, FanStateResponse::MESSAGE_TYPE, FanStateResponse::ESTIMATED_SIZE);
|
return this->send_message_smart_(fan, FanStateResponse::MESSAGE_TYPE, FanStateResponse::ESTIMATED_SIZE);
|
||||||
}
|
}
|
||||||
uint16_t APIConnection::try_send_fan_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_fan_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *fan = static_cast<fan::Fan *>(entity);
|
auto *fan = static_cast<fan::Fan *>(entity);
|
||||||
FanStateResponse msg;
|
FanStateResponse msg;
|
||||||
auto traits = fan->get_traits();
|
auto traits = fan->get_traits();
|
||||||
@@ -445,10 +436,9 @@ uint16_t APIConnection::try_send_fan_state(EntityBase *entity, APIConnection *co
|
|||||||
msg.direction = static_cast<enums::FanDirection>(fan->direction);
|
msg.direction = static_cast<enums::FanDirection>(fan->direction);
|
||||||
if (traits.supports_preset_modes() && fan->has_preset_mode())
|
if (traits.supports_preset_modes() && fan->has_preset_mode())
|
||||||
msg.preset_mode = fan->get_preset_mode();
|
msg.preset_mode = fan->get_preset_mode();
|
||||||
return fill_and_encode_entity_state(fan, msg, FanStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
|
return fill_and_encode_entity_state(fan, msg, FanStateResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
}
|
}
|
||||||
uint16_t APIConnection::try_send_fan_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_fan_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *fan = static_cast<fan::Fan *>(entity);
|
auto *fan = static_cast<fan::Fan *>(entity);
|
||||||
ListEntitiesFanResponse msg;
|
ListEntitiesFanResponse msg;
|
||||||
auto traits = fan->get_traits();
|
auto traits = fan->get_traits();
|
||||||
@@ -457,7 +447,7 @@ uint16_t APIConnection::try_send_fan_info(EntityBase *entity, APIConnection *con
|
|||||||
msg.supports_direction = traits.supports_direction();
|
msg.supports_direction = traits.supports_direction();
|
||||||
msg.supported_speed_count = traits.supported_speed_count();
|
msg.supported_speed_count = traits.supported_speed_count();
|
||||||
msg.supported_preset_modes = &traits.supported_preset_modes();
|
msg.supported_preset_modes = &traits.supported_preset_modes();
|
||||||
return fill_and_encode_entity_info(fan, msg, ListEntitiesFanResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
|
return fill_and_encode_entity_info(fan, msg, ListEntitiesFanResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
}
|
}
|
||||||
void APIConnection::fan_command(const FanCommandRequest &msg) {
|
void APIConnection::fan_command(const FanCommandRequest &msg) {
|
||||||
ENTITY_COMMAND_MAKE_CALL(fan::Fan, fan, fan)
|
ENTITY_COMMAND_MAKE_CALL(fan::Fan, fan, fan)
|
||||||
@@ -481,8 +471,7 @@ void APIConnection::fan_command(const FanCommandRequest &msg) {
|
|||||||
bool APIConnection::send_light_state(light::LightState *light) {
|
bool APIConnection::send_light_state(light::LightState *light) {
|
||||||
return this->send_message_smart_(light, LightStateResponse::MESSAGE_TYPE, LightStateResponse::ESTIMATED_SIZE);
|
return this->send_message_smart_(light, LightStateResponse::MESSAGE_TYPE, LightStateResponse::ESTIMATED_SIZE);
|
||||||
}
|
}
|
||||||
uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *light = static_cast<light::LightState *>(entity);
|
auto *light = static_cast<light::LightState *>(entity);
|
||||||
LightStateResponse resp;
|
LightStateResponse resp;
|
||||||
auto values = light->remote_values;
|
auto values = light->remote_values;
|
||||||
@@ -501,10 +490,9 @@ uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection *
|
|||||||
if (light->supports_effects()) {
|
if (light->supports_effects()) {
|
||||||
resp.effect = light->get_effect_name();
|
resp.effect = light->get_effect_name();
|
||||||
}
|
}
|
||||||
return fill_and_encode_entity_state(light, resp, LightStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
|
return fill_and_encode_entity_state(light, resp, LightStateResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
}
|
}
|
||||||
uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *light = static_cast<light::LightState *>(entity);
|
auto *light = static_cast<light::LightState *>(entity);
|
||||||
ListEntitiesLightResponse msg;
|
ListEntitiesLightResponse msg;
|
||||||
auto traits = light->get_traits();
|
auto traits = light->get_traits();
|
||||||
@@ -527,8 +515,7 @@ uint16_t APIConnection::try_send_light_info(EntityBase *entity, APIConnection *c
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
msg.effects = &effects_list;
|
msg.effects = &effects_list;
|
||||||
return fill_and_encode_entity_info(light, msg, ListEntitiesLightResponse::MESSAGE_TYPE, conn, remaining_size,
|
return fill_and_encode_entity_info(light, msg, ListEntitiesLightResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
is_single);
|
|
||||||
}
|
}
|
||||||
void APIConnection::light_command(const LightCommandRequest &msg) {
|
void APIConnection::light_command(const LightCommandRequest &msg) {
|
||||||
ENTITY_COMMAND_MAKE_CALL(light::LightState, light, light)
|
ENTITY_COMMAND_MAKE_CALL(light::LightState, light, light)
|
||||||
@@ -568,17 +555,15 @@ bool APIConnection::send_sensor_state(sensor::Sensor *sensor) {
|
|||||||
return this->send_message_smart_(sensor, SensorStateResponse::MESSAGE_TYPE, SensorStateResponse::ESTIMATED_SIZE);
|
return this->send_message_smart_(sensor, SensorStateResponse::MESSAGE_TYPE, SensorStateResponse::ESTIMATED_SIZE);
|
||||||
}
|
}
|
||||||
|
|
||||||
uint16_t APIConnection::try_send_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *sensor = static_cast<sensor::Sensor *>(entity);
|
auto *sensor = static_cast<sensor::Sensor *>(entity);
|
||||||
SensorStateResponse resp;
|
SensorStateResponse resp;
|
||||||
resp.state = sensor->state;
|
resp.state = sensor->state;
|
||||||
resp.missing_state = !sensor->has_state();
|
resp.missing_state = !sensor->has_state();
|
||||||
return fill_and_encode_entity_state(sensor, resp, SensorStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
|
return fill_and_encode_entity_state(sensor, resp, SensorStateResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
}
|
}
|
||||||
|
|
||||||
uint16_t APIConnection::try_send_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *sensor = static_cast<sensor::Sensor *>(entity);
|
auto *sensor = static_cast<sensor::Sensor *>(entity);
|
||||||
ListEntitiesSensorResponse msg;
|
ListEntitiesSensorResponse msg;
|
||||||
msg.unit_of_measurement = sensor->get_unit_of_measurement_ref();
|
msg.unit_of_measurement = sensor->get_unit_of_measurement_ref();
|
||||||
@@ -586,8 +571,7 @@ uint16_t APIConnection::try_send_sensor_info(EntityBase *entity, APIConnection *
|
|||||||
msg.force_update = sensor->get_force_update();
|
msg.force_update = sensor->get_force_update();
|
||||||
msg.device_class = sensor->get_device_class_ref();
|
msg.device_class = sensor->get_device_class_ref();
|
||||||
msg.state_class = static_cast<enums::SensorStateClass>(sensor->get_state_class());
|
msg.state_class = static_cast<enums::SensorStateClass>(sensor->get_state_class());
|
||||||
return fill_and_encode_entity_info(sensor, msg, ListEntitiesSensorResponse::MESSAGE_TYPE, conn, remaining_size,
|
return fill_and_encode_entity_info(sensor, msg, ListEntitiesSensorResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
is_single);
|
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
@@ -596,23 +580,19 @@ bool APIConnection::send_switch_state(switch_::Switch *a_switch) {
|
|||||||
return this->send_message_smart_(a_switch, SwitchStateResponse::MESSAGE_TYPE, SwitchStateResponse::ESTIMATED_SIZE);
|
return this->send_message_smart_(a_switch, SwitchStateResponse::MESSAGE_TYPE, SwitchStateResponse::ESTIMATED_SIZE);
|
||||||
}
|
}
|
||||||
|
|
||||||
uint16_t APIConnection::try_send_switch_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_switch_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *a_switch = static_cast<switch_::Switch *>(entity);
|
auto *a_switch = static_cast<switch_::Switch *>(entity);
|
||||||
SwitchStateResponse resp;
|
SwitchStateResponse resp;
|
||||||
resp.state = a_switch->state;
|
resp.state = a_switch->state;
|
||||||
return fill_and_encode_entity_state(a_switch, resp, SwitchStateResponse::MESSAGE_TYPE, conn, remaining_size,
|
return fill_and_encode_entity_state(a_switch, resp, SwitchStateResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
is_single);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
uint16_t APIConnection::try_send_switch_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_switch_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *a_switch = static_cast<switch_::Switch *>(entity);
|
auto *a_switch = static_cast<switch_::Switch *>(entity);
|
||||||
ListEntitiesSwitchResponse msg;
|
ListEntitiesSwitchResponse msg;
|
||||||
msg.assumed_state = a_switch->assumed_state();
|
msg.assumed_state = a_switch->assumed_state();
|
||||||
msg.device_class = a_switch->get_device_class_ref();
|
msg.device_class = a_switch->get_device_class_ref();
|
||||||
return fill_and_encode_entity_info(a_switch, msg, ListEntitiesSwitchResponse::MESSAGE_TYPE, conn, remaining_size,
|
return fill_and_encode_entity_info(a_switch, msg, ListEntitiesSwitchResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
is_single);
|
|
||||||
}
|
}
|
||||||
void APIConnection::switch_command(const SwitchCommandRequest &msg) {
|
void APIConnection::switch_command(const SwitchCommandRequest &msg) {
|
||||||
ENTITY_COMMAND_GET(switch_::Switch, a_switch, switch)
|
ENTITY_COMMAND_GET(switch_::Switch, a_switch, switch)
|
||||||
@@ -631,22 +611,19 @@ bool APIConnection::send_text_sensor_state(text_sensor::TextSensor *text_sensor)
|
|||||||
TextSensorStateResponse::ESTIMATED_SIZE);
|
TextSensorStateResponse::ESTIMATED_SIZE);
|
||||||
}
|
}
|
||||||
|
|
||||||
uint16_t APIConnection::try_send_text_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_text_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *text_sensor = static_cast<text_sensor::TextSensor *>(entity);
|
auto *text_sensor = static_cast<text_sensor::TextSensor *>(entity);
|
||||||
TextSensorStateResponse resp;
|
TextSensorStateResponse resp;
|
||||||
resp.state = StringRef(text_sensor->state);
|
resp.state = StringRef(text_sensor->state);
|
||||||
resp.missing_state = !text_sensor->has_state();
|
resp.missing_state = !text_sensor->has_state();
|
||||||
return fill_and_encode_entity_state(text_sensor, resp, TextSensorStateResponse::MESSAGE_TYPE, conn, remaining_size,
|
return fill_and_encode_entity_state(text_sensor, resp, TextSensorStateResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
is_single);
|
|
||||||
}
|
}
|
||||||
uint16_t APIConnection::try_send_text_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_text_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *text_sensor = static_cast<text_sensor::TextSensor *>(entity);
|
auto *text_sensor = static_cast<text_sensor::TextSensor *>(entity);
|
||||||
ListEntitiesTextSensorResponse msg;
|
ListEntitiesTextSensorResponse msg;
|
||||||
msg.device_class = text_sensor->get_device_class_ref();
|
msg.device_class = text_sensor->get_device_class_ref();
|
||||||
return fill_and_encode_entity_info(text_sensor, msg, ListEntitiesTextSensorResponse::MESSAGE_TYPE, conn,
|
return fill_and_encode_entity_info(text_sensor, msg, ListEntitiesTextSensorResponse::MESSAGE_TYPE, conn,
|
||||||
remaining_size, is_single);
|
remaining_size);
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
@@ -654,8 +631,7 @@ uint16_t APIConnection::try_send_text_sensor_info(EntityBase *entity, APIConnect
|
|||||||
bool APIConnection::send_climate_state(climate::Climate *climate) {
|
bool APIConnection::send_climate_state(climate::Climate *climate) {
|
||||||
return this->send_message_smart_(climate, ClimateStateResponse::MESSAGE_TYPE, ClimateStateResponse::ESTIMATED_SIZE);
|
return this->send_message_smart_(climate, ClimateStateResponse::MESSAGE_TYPE, ClimateStateResponse::ESTIMATED_SIZE);
|
||||||
}
|
}
|
||||||
uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *climate = static_cast<climate::Climate *>(entity);
|
auto *climate = static_cast<climate::Climate *>(entity);
|
||||||
ClimateStateResponse resp;
|
ClimateStateResponse resp;
|
||||||
auto traits = climate->get_traits();
|
auto traits = climate->get_traits();
|
||||||
@@ -687,11 +663,9 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
|
|||||||
resp.current_humidity = climate->current_humidity;
|
resp.current_humidity = climate->current_humidity;
|
||||||
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY))
|
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY))
|
||||||
resp.target_humidity = climate->target_humidity;
|
resp.target_humidity = climate->target_humidity;
|
||||||
return fill_and_encode_entity_state(climate, resp, ClimateStateResponse::MESSAGE_TYPE, conn, remaining_size,
|
return fill_and_encode_entity_state(climate, resp, ClimateStateResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
is_single);
|
|
||||||
}
|
}
|
||||||
uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *climate = static_cast<climate::Climate *>(entity);
|
auto *climate = static_cast<climate::Climate *>(entity);
|
||||||
ListEntitiesClimateResponse msg;
|
ListEntitiesClimateResponse msg;
|
||||||
auto traits = climate->get_traits();
|
auto traits = climate->get_traits();
|
||||||
@@ -716,8 +690,7 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
|
|||||||
msg.supported_presets = &traits.get_supported_presets();
|
msg.supported_presets = &traits.get_supported_presets();
|
||||||
msg.supported_custom_presets = &traits.get_supported_custom_presets();
|
msg.supported_custom_presets = &traits.get_supported_custom_presets();
|
||||||
msg.supported_swing_modes = &traits.get_supported_swing_modes();
|
msg.supported_swing_modes = &traits.get_supported_swing_modes();
|
||||||
return fill_and_encode_entity_info(climate, msg, ListEntitiesClimateResponse::MESSAGE_TYPE, conn, remaining_size,
|
return fill_and_encode_entity_info(climate, msg, ListEntitiesClimateResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
is_single);
|
|
||||||
}
|
}
|
||||||
void APIConnection::climate_command(const ClimateCommandRequest &msg) {
|
void APIConnection::climate_command(const ClimateCommandRequest &msg) {
|
||||||
ENTITY_COMMAND_MAKE_CALL(climate::Climate, climate, climate)
|
ENTITY_COMMAND_MAKE_CALL(climate::Climate, climate, climate)
|
||||||
@@ -750,17 +723,15 @@ bool APIConnection::send_number_state(number::Number *number) {
|
|||||||
return this->send_message_smart_(number, NumberStateResponse::MESSAGE_TYPE, NumberStateResponse::ESTIMATED_SIZE);
|
return this->send_message_smart_(number, NumberStateResponse::MESSAGE_TYPE, NumberStateResponse::ESTIMATED_SIZE);
|
||||||
}
|
}
|
||||||
|
|
||||||
uint16_t APIConnection::try_send_number_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_number_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *number = static_cast<number::Number *>(entity);
|
auto *number = static_cast<number::Number *>(entity);
|
||||||
NumberStateResponse resp;
|
NumberStateResponse resp;
|
||||||
resp.state = number->state;
|
resp.state = number->state;
|
||||||
resp.missing_state = !number->has_state();
|
resp.missing_state = !number->has_state();
|
||||||
return fill_and_encode_entity_state(number, resp, NumberStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
|
return fill_and_encode_entity_state(number, resp, NumberStateResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
}
|
}
|
||||||
|
|
||||||
uint16_t APIConnection::try_send_number_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_number_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *number = static_cast<number::Number *>(entity);
|
auto *number = static_cast<number::Number *>(entity);
|
||||||
ListEntitiesNumberResponse msg;
|
ListEntitiesNumberResponse msg;
|
||||||
msg.unit_of_measurement = number->traits.get_unit_of_measurement_ref();
|
msg.unit_of_measurement = number->traits.get_unit_of_measurement_ref();
|
||||||
@@ -769,8 +740,7 @@ uint16_t APIConnection::try_send_number_info(EntityBase *entity, APIConnection *
|
|||||||
msg.min_value = number->traits.get_min_value();
|
msg.min_value = number->traits.get_min_value();
|
||||||
msg.max_value = number->traits.get_max_value();
|
msg.max_value = number->traits.get_max_value();
|
||||||
msg.step = number->traits.get_step();
|
msg.step = number->traits.get_step();
|
||||||
return fill_and_encode_entity_info(number, msg, ListEntitiesNumberResponse::MESSAGE_TYPE, conn, remaining_size,
|
return fill_and_encode_entity_info(number, msg, ListEntitiesNumberResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
is_single);
|
|
||||||
}
|
}
|
||||||
void APIConnection::number_command(const NumberCommandRequest &msg) {
|
void APIConnection::number_command(const NumberCommandRequest &msg) {
|
||||||
ENTITY_COMMAND_MAKE_CALL(number::Number, number, number)
|
ENTITY_COMMAND_MAKE_CALL(number::Number, number, number)
|
||||||
@@ -783,22 +753,19 @@ void APIConnection::number_command(const NumberCommandRequest &msg) {
|
|||||||
bool APIConnection::send_date_state(datetime::DateEntity *date) {
|
bool APIConnection::send_date_state(datetime::DateEntity *date) {
|
||||||
return this->send_message_smart_(date, DateStateResponse::MESSAGE_TYPE, DateStateResponse::ESTIMATED_SIZE);
|
return this->send_message_smart_(date, DateStateResponse::MESSAGE_TYPE, DateStateResponse::ESTIMATED_SIZE);
|
||||||
}
|
}
|
||||||
uint16_t APIConnection::try_send_date_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_date_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *date = static_cast<datetime::DateEntity *>(entity);
|
auto *date = static_cast<datetime::DateEntity *>(entity);
|
||||||
DateStateResponse resp;
|
DateStateResponse resp;
|
||||||
resp.missing_state = !date->has_state();
|
resp.missing_state = !date->has_state();
|
||||||
resp.year = date->year;
|
resp.year = date->year;
|
||||||
resp.month = date->month;
|
resp.month = date->month;
|
||||||
resp.day = date->day;
|
resp.day = date->day;
|
||||||
return fill_and_encode_entity_state(date, resp, DateStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
|
return fill_and_encode_entity_state(date, resp, DateStateResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
}
|
}
|
||||||
uint16_t APIConnection::try_send_date_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_date_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *date = static_cast<datetime::DateEntity *>(entity);
|
auto *date = static_cast<datetime::DateEntity *>(entity);
|
||||||
ListEntitiesDateResponse msg;
|
ListEntitiesDateResponse msg;
|
||||||
return fill_and_encode_entity_info(date, msg, ListEntitiesDateResponse::MESSAGE_TYPE, conn, remaining_size,
|
return fill_and_encode_entity_info(date, msg, ListEntitiesDateResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
is_single);
|
|
||||||
}
|
}
|
||||||
void APIConnection::date_command(const DateCommandRequest &msg) {
|
void APIConnection::date_command(const DateCommandRequest &msg) {
|
||||||
ENTITY_COMMAND_MAKE_CALL(datetime::DateEntity, date, date)
|
ENTITY_COMMAND_MAKE_CALL(datetime::DateEntity, date, date)
|
||||||
@@ -811,22 +778,19 @@ void APIConnection::date_command(const DateCommandRequest &msg) {
|
|||||||
bool APIConnection::send_time_state(datetime::TimeEntity *time) {
|
bool APIConnection::send_time_state(datetime::TimeEntity *time) {
|
||||||
return this->send_message_smart_(time, TimeStateResponse::MESSAGE_TYPE, TimeStateResponse::ESTIMATED_SIZE);
|
return this->send_message_smart_(time, TimeStateResponse::MESSAGE_TYPE, TimeStateResponse::ESTIMATED_SIZE);
|
||||||
}
|
}
|
||||||
uint16_t APIConnection::try_send_time_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_time_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *time = static_cast<datetime::TimeEntity *>(entity);
|
auto *time = static_cast<datetime::TimeEntity *>(entity);
|
||||||
TimeStateResponse resp;
|
TimeStateResponse resp;
|
||||||
resp.missing_state = !time->has_state();
|
resp.missing_state = !time->has_state();
|
||||||
resp.hour = time->hour;
|
resp.hour = time->hour;
|
||||||
resp.minute = time->minute;
|
resp.minute = time->minute;
|
||||||
resp.second = time->second;
|
resp.second = time->second;
|
||||||
return fill_and_encode_entity_state(time, resp, TimeStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
|
return fill_and_encode_entity_state(time, resp, TimeStateResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
}
|
}
|
||||||
uint16_t APIConnection::try_send_time_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
uint16_t APIConnection::try_send_time_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
|
||||||
bool is_single) {
|
|
||||||
auto *time = static_cast<datetime::TimeEntity *>(entity);
|
auto *time = static_cast<datetime::TimeEntity *>(entity);
|
||||||
ListEntitiesTimeResponse msg;
|
ListEntitiesTimeResponse msg;
|
||||||
return fill_and_encode_entity_info(time, msg, ListEntitiesTimeResponse::MESSAGE_TYPE, conn, remaining_size,
|
return fill_and_encode_entity_info(time, msg, ListEntitiesTimeResponse::MESSAGE_TYPE, conn, remaining_size);
|
||||||
is_single);
|
|
||||||
}
|
}
|
||||||
void APIConnection::time_command(const TimeCommandRequest &msg) {
|
void APIConnection::time_command(const TimeCommandRequest &msg) {
|
||||||
ENTITY_COMMAND_MAKE_CALL(datetime::TimeEntity, time, time)
|
ENTITY_COMMAND_MAKE_CALL(datetime::TimeEntity, time, time)
|
||||||
@@ -840,8 +804,7 @@ bool APIConnection::send_datetime_state(datetime::DateTimeEntity *datetime) {
 return this->send_message_smart_(datetime, DateTimeStateResponse::MESSAGE_TYPE,
 DateTimeStateResponse::ESTIMATED_SIZE);
 }
-uint16_t APIConnection::try_send_datetime_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_datetime_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *datetime = static_cast<datetime::DateTimeEntity *>(entity);
 DateTimeStateResponse resp;
 resp.missing_state = !datetime->has_state();
@@ -849,15 +812,12 @@ uint16_t APIConnection::try_send_datetime_state(EntityBase *entity, APIConnectio
 ESPTime state = datetime->state_as_esptime();
 resp.epoch_seconds = state.timestamp;
 }
-return fill_and_encode_entity_state(datetime, resp, DateTimeStateResponse::MESSAGE_TYPE, conn, remaining_size,
-is_single);
+return fill_and_encode_entity_state(datetime, resp, DateTimeStateResponse::MESSAGE_TYPE, conn, remaining_size);
 }
-uint16_t APIConnection::try_send_datetime_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_datetime_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *datetime = static_cast<datetime::DateTimeEntity *>(entity);
 ListEntitiesDateTimeResponse msg;
-return fill_and_encode_entity_info(datetime, msg, ListEntitiesDateTimeResponse::MESSAGE_TYPE, conn, remaining_size,
-is_single);
+return fill_and_encode_entity_info(datetime, msg, ListEntitiesDateTimeResponse::MESSAGE_TYPE, conn, remaining_size);
 }
 void APIConnection::datetime_command(const DateTimeCommandRequest &msg) {
 ENTITY_COMMAND_MAKE_CALL(datetime::DateTimeEntity, datetime, datetime)
@@ -871,25 +831,22 @@ bool APIConnection::send_text_state(text::Text *text) {
 return this->send_message_smart_(text, TextStateResponse::MESSAGE_TYPE, TextStateResponse::ESTIMATED_SIZE);
 }

-uint16_t APIConnection::try_send_text_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_text_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *text = static_cast<text::Text *>(entity);
 TextStateResponse resp;
 resp.state = StringRef(text->state);
 resp.missing_state = !text->has_state();
-return fill_and_encode_entity_state(text, resp, TextStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
+return fill_and_encode_entity_state(text, resp, TextStateResponse::MESSAGE_TYPE, conn, remaining_size);
 }

-uint16_t APIConnection::try_send_text_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_text_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *text = static_cast<text::Text *>(entity);
 ListEntitiesTextResponse msg;
 msg.mode = static_cast<enums::TextMode>(text->traits.get_mode());
 msg.min_length = text->traits.get_min_length();
 msg.max_length = text->traits.get_max_length();
 msg.pattern = text->traits.get_pattern_ref();
-return fill_and_encode_entity_info(text, msg, ListEntitiesTextResponse::MESSAGE_TYPE, conn, remaining_size,
-is_single);
+return fill_and_encode_entity_info(text, msg, ListEntitiesTextResponse::MESSAGE_TYPE, conn, remaining_size);
 }
 void APIConnection::text_command(const TextCommandRequest &msg) {
 ENTITY_COMMAND_MAKE_CALL(text::Text, text, text)
@@ -903,22 +860,19 @@ bool APIConnection::send_select_state(select::Select *select) {
 return this->send_message_smart_(select, SelectStateResponse::MESSAGE_TYPE, SelectStateResponse::ESTIMATED_SIZE);
 }

-uint16_t APIConnection::try_send_select_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_select_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *select = static_cast<select::Select *>(entity);
 SelectStateResponse resp;
 resp.state = select->current_option();
 resp.missing_state = !select->has_state();
-return fill_and_encode_entity_state(select, resp, SelectStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
+return fill_and_encode_entity_state(select, resp, SelectStateResponse::MESSAGE_TYPE, conn, remaining_size);
 }

-uint16_t APIConnection::try_send_select_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_select_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *select = static_cast<select::Select *>(entity);
 ListEntitiesSelectResponse msg;
 msg.options = &select->traits.get_options();
-return fill_and_encode_entity_info(select, msg, ListEntitiesSelectResponse::MESSAGE_TYPE, conn, remaining_size,
-is_single);
+return fill_and_encode_entity_info(select, msg, ListEntitiesSelectResponse::MESSAGE_TYPE, conn, remaining_size);
 }
 void APIConnection::select_command(const SelectCommandRequest &msg) {
 ENTITY_COMMAND_MAKE_CALL(select::Select, select, select)
@@ -928,13 +882,11 @@ void APIConnection::select_command(const SelectCommandRequest &msg) {
 #endif

 #ifdef USE_BUTTON
-uint16_t APIConnection::try_send_button_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_button_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *button = static_cast<button::Button *>(entity);
 ListEntitiesButtonResponse msg;
 msg.device_class = button->get_device_class_ref();
-return fill_and_encode_entity_info(button, msg, ListEntitiesButtonResponse::MESSAGE_TYPE, conn, remaining_size,
-is_single);
+return fill_and_encode_entity_info(button, msg, ListEntitiesButtonResponse::MESSAGE_TYPE, conn, remaining_size);
 }
 void esphome::api::APIConnection::button_command(const ButtonCommandRequest &msg) {
 ENTITY_COMMAND_GET(button::Button, button, button)
@@ -947,23 +899,20 @@ bool APIConnection::send_lock_state(lock::Lock *a_lock) {
 return this->send_message_smart_(a_lock, LockStateResponse::MESSAGE_TYPE, LockStateResponse::ESTIMATED_SIZE);
 }

-uint16_t APIConnection::try_send_lock_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_lock_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *a_lock = static_cast<lock::Lock *>(entity);
 LockStateResponse resp;
 resp.state = static_cast<enums::LockState>(a_lock->state);
-return fill_and_encode_entity_state(a_lock, resp, LockStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
+return fill_and_encode_entity_state(a_lock, resp, LockStateResponse::MESSAGE_TYPE, conn, remaining_size);
 }

-uint16_t APIConnection::try_send_lock_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_lock_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *a_lock = static_cast<lock::Lock *>(entity);
 ListEntitiesLockResponse msg;
 msg.assumed_state = a_lock->traits.get_assumed_state();
 msg.supports_open = a_lock->traits.get_supports_open();
 msg.requires_code = a_lock->traits.get_requires_code();
-return fill_and_encode_entity_info(a_lock, msg, ListEntitiesLockResponse::MESSAGE_TYPE, conn, remaining_size,
-is_single);
+return fill_and_encode_entity_info(a_lock, msg, ListEntitiesLockResponse::MESSAGE_TYPE, conn, remaining_size);
 }
 void APIConnection::lock_command(const LockCommandRequest &msg) {
 ENTITY_COMMAND_GET(lock::Lock, a_lock, lock)
@@ -986,16 +935,14 @@ void APIConnection::lock_command(const LockCommandRequest &msg) {
 bool APIConnection::send_valve_state(valve::Valve *valve) {
 return this->send_message_smart_(valve, ValveStateResponse::MESSAGE_TYPE, ValveStateResponse::ESTIMATED_SIZE);
 }
-uint16_t APIConnection::try_send_valve_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_valve_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *valve = static_cast<valve::Valve *>(entity);
 ValveStateResponse resp;
 resp.position = valve->position;
 resp.current_operation = static_cast<enums::ValveOperation>(valve->current_operation);
-return fill_and_encode_entity_state(valve, resp, ValveStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
+return fill_and_encode_entity_state(valve, resp, ValveStateResponse::MESSAGE_TYPE, conn, remaining_size);
 }
-uint16_t APIConnection::try_send_valve_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_valve_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *valve = static_cast<valve::Valve *>(entity);
 ListEntitiesValveResponse msg;
 auto traits = valve->get_traits();
@@ -1003,8 +950,7 @@ uint16_t APIConnection::try_send_valve_info(EntityBase *entity, APIConnection *c
 msg.assumed_state = traits.get_is_assumed_state();
 msg.supports_position = traits.get_supports_position();
 msg.supports_stop = traits.get_supports_stop();
-return fill_and_encode_entity_info(valve, msg, ListEntitiesValveResponse::MESSAGE_TYPE, conn, remaining_size,
-is_single);
+return fill_and_encode_entity_info(valve, msg, ListEntitiesValveResponse::MESSAGE_TYPE, conn, remaining_size);
 }
 void APIConnection::valve_command(const ValveCommandRequest &msg) {
 ENTITY_COMMAND_MAKE_CALL(valve::Valve, valve, valve)
@@ -1021,8 +967,7 @@ bool APIConnection::send_media_player_state(media_player::MediaPlayer *media_pla
 return this->send_message_smart_(media_player, MediaPlayerStateResponse::MESSAGE_TYPE,
 MediaPlayerStateResponse::ESTIMATED_SIZE);
 }
-uint16_t APIConnection::try_send_media_player_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_media_player_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *media_player = static_cast<media_player::MediaPlayer *>(entity);
 MediaPlayerStateResponse resp;
 media_player::MediaPlayerState report_state = media_player->state == media_player::MEDIA_PLAYER_STATE_ANNOUNCING
@@ -1031,11 +976,9 @@ uint16_t APIConnection::try_send_media_player_state(EntityBase *entity, APIConne
 resp.state = static_cast<enums::MediaPlayerState>(report_state);
 resp.volume = media_player->volume;
 resp.muted = media_player->is_muted();
-return fill_and_encode_entity_state(media_player, resp, MediaPlayerStateResponse::MESSAGE_TYPE, conn, remaining_size,
-is_single);
+return fill_and_encode_entity_state(media_player, resp, MediaPlayerStateResponse::MESSAGE_TYPE, conn, remaining_size);
 }
-uint16_t APIConnection::try_send_media_player_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_media_player_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *media_player = static_cast<media_player::MediaPlayer *>(entity);
 ListEntitiesMediaPlayerResponse msg;
 auto traits = media_player->get_traits();
@@ -1051,7 +994,7 @@ uint16_t APIConnection::try_send_media_player_info(EntityBase *entity, APIConnec
 media_format.sample_bytes = supported_format.sample_bytes;
 }
 return fill_and_encode_entity_info(media_player, msg, ListEntitiesMediaPlayerResponse::MESSAGE_TYPE, conn,
-remaining_size, is_single);
+remaining_size);
 }
 void APIConnection::media_player_command(const MediaPlayerCommandRequest &msg) {
 ENTITY_COMMAND_MAKE_CALL(media_player::MediaPlayer, media_player, media_player)
@@ -1092,7 +1035,7 @@ void APIConnection::try_send_camera_image_() {
 msg.device_id = camera::Camera::instance()->get_device_id();
 #endif

-if (!this->send_message_(msg, CameraImageResponse::MESSAGE_TYPE)) {
+if (!this->send_message_impl(msg, CameraImageResponse::MESSAGE_TYPE)) {
 return; // Send failed, try again later
 }
 this->image_reader_->consume_data(to_send);
@@ -1115,12 +1058,10 @@ void APIConnection::set_camera_state(std::shared_ptr<camera::CameraImage> image)
 this->try_send_camera_image_();
 }
 }
-uint16_t APIConnection::try_send_camera_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_camera_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *camera = static_cast<camera::Camera *>(entity);
 ListEntitiesCameraResponse msg;
-return fill_and_encode_entity_info(camera, msg, ListEntitiesCameraResponse::MESSAGE_TYPE, conn, remaining_size,
-is_single);
+return fill_and_encode_entity_info(camera, msg, ListEntitiesCameraResponse::MESSAGE_TYPE, conn, remaining_size);
 }
 void APIConnection::camera_image(const CameraImageRequest &msg) {
 if (camera::Camera::instance() == nullptr)
@@ -1305,22 +1246,22 @@ bool APIConnection::send_alarm_control_panel_state(alarm_control_panel::AlarmCon
 AlarmControlPanelStateResponse::ESTIMATED_SIZE);
 }
 uint16_t APIConnection::try_send_alarm_control_panel_state(EntityBase *entity, APIConnection *conn,
-uint32_t remaining_size, bool is_single) {
+uint32_t remaining_size) {
 auto *a_alarm_control_panel = static_cast<alarm_control_panel::AlarmControlPanel *>(entity);
 AlarmControlPanelStateResponse resp;
 resp.state = static_cast<enums::AlarmControlPanelState>(a_alarm_control_panel->get_state());
 return fill_and_encode_entity_state(a_alarm_control_panel, resp, AlarmControlPanelStateResponse::MESSAGE_TYPE, conn,
-remaining_size, is_single);
+remaining_size);
 }
 uint16_t APIConnection::try_send_alarm_control_panel_info(EntityBase *entity, APIConnection *conn,
-uint32_t remaining_size, bool is_single) {
+uint32_t remaining_size) {
 auto *a_alarm_control_panel = static_cast<alarm_control_panel::AlarmControlPanel *>(entity);
 ListEntitiesAlarmControlPanelResponse msg;
 msg.supported_features = a_alarm_control_panel->get_supported_features();
 msg.requires_code = a_alarm_control_panel->get_requires_code();
 msg.requires_code_to_arm = a_alarm_control_panel->get_requires_code_to_arm();
 return fill_and_encode_entity_info(a_alarm_control_panel, msg, ListEntitiesAlarmControlPanelResponse::MESSAGE_TYPE,
-conn, remaining_size, is_single);
+conn, remaining_size);
 }
 void APIConnection::alarm_control_panel_command(const AlarmControlPanelCommandRequest &msg) {
 ENTITY_COMMAND_MAKE_CALL(alarm_control_panel::AlarmControlPanel, a_alarm_control_panel, alarm_control_panel)
@@ -1357,8 +1298,7 @@ bool APIConnection::send_water_heater_state(water_heater::WaterHeater *water_hea
 return this->send_message_smart_(water_heater, WaterHeaterStateResponse::MESSAGE_TYPE,
 WaterHeaterStateResponse::ESTIMATED_SIZE);
 }
-uint16_t APIConnection::try_send_water_heater_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_water_heater_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *wh = static_cast<water_heater::WaterHeater *>(entity);
 WaterHeaterStateResponse resp;
 resp.mode = static_cast<enums::WaterHeaterMode>(wh->get_mode());
@@ -1369,10 +1309,9 @@ uint16_t APIConnection::try_send_water_heater_state(EntityBase *entity, APIConne
 resp.state = wh->get_state();
 resp.key = wh->get_object_id_hash();

-return encode_message_to_buffer(resp, WaterHeaterStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
+return encode_message_to_buffer(resp, WaterHeaterStateResponse::MESSAGE_TYPE, conn, remaining_size);
 }
-uint16_t APIConnection::try_send_water_heater_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_water_heater_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *wh = static_cast<water_heater::WaterHeater *>(entity);
 ListEntitiesWaterHeaterResponse msg;
 auto traits = wh->get_traits();
@@ -1381,11 +1320,10 @@ uint16_t APIConnection::try_send_water_heater_info(EntityBase *entity, APIConnec
 msg.target_temperature_step = traits.get_target_temperature_step();
 msg.supported_modes = &traits.get_supported_modes();
 msg.supported_features = traits.get_feature_flags();
-return fill_and_encode_entity_info(wh, msg, ListEntitiesWaterHeaterResponse::MESSAGE_TYPE, conn, remaining_size,
-is_single);
+return fill_and_encode_entity_info(wh, msg, ListEntitiesWaterHeaterResponse::MESSAGE_TYPE, conn, remaining_size);
 }

-void APIConnection::on_water_heater_command_request(const WaterHeaterCommandRequest &msg) {
+void APIConnection::water_heater_command(const WaterHeaterCommandRequest &msg) {
 ENTITY_COMMAND_MAKE_CALL(water_heater::WaterHeater, water_heater, water_heater)
 if (msg.has_fields & enums::WATER_HEATER_COMMAND_HAS_MODE)
 call.set_mode(static_cast<water_heater::WaterHeaterMode>(msg.mode));
@@ -1411,20 +1349,18 @@ void APIConnection::send_event(event::Event *event) {
 event->get_last_event_type_index());
 }
 uint16_t APIConnection::try_send_event_response(event::Event *event, StringRef event_type, APIConnection *conn,
-uint32_t remaining_size, bool is_single) {
+uint32_t remaining_size) {
 EventResponse resp;
 resp.event_type = event_type;
-return fill_and_encode_entity_state(event, resp, EventResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
+return fill_and_encode_entity_state(event, resp, EventResponse::MESSAGE_TYPE, conn, remaining_size);
 }

-uint16_t APIConnection::try_send_event_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_event_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *event = static_cast<event::Event *>(entity);
 ListEntitiesEventResponse msg;
 msg.device_class = event->get_device_class_ref();
 msg.event_types = &event->get_event_types();
-return fill_and_encode_entity_info(event, msg, ListEntitiesEventResponse::MESSAGE_TYPE, conn, remaining_size,
-is_single);
+return fill_and_encode_entity_info(event, msg, ListEntitiesEventResponse::MESSAGE_TYPE, conn, remaining_size);
 }
 #endif

@@ -1447,13 +1383,11 @@ void APIConnection::send_infrared_rf_receive_event(const InfraredRFReceiveEvent
 #endif

 #ifdef USE_INFRARED
-uint16_t APIConnection::try_send_infrared_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_infrared_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *infrared = static_cast<infrared::Infrared *>(entity);
 ListEntitiesInfraredResponse msg;
 msg.capabilities = infrared->get_capability_flags();
-return fill_and_encode_entity_info(infrared, msg, ListEntitiesInfraredResponse::MESSAGE_TYPE, conn, remaining_size,
-is_single);
+return fill_and_encode_entity_info(infrared, msg, ListEntitiesInfraredResponse::MESSAGE_TYPE, conn, remaining_size);
 }
 #endif

@@ -1461,8 +1395,7 @@ uint16_t APIConnection::try_send_infrared_info(EntityBase *entity, APIConnection
 bool APIConnection::send_update_state(update::UpdateEntity *update) {
 return this->send_message_smart_(update, UpdateStateResponse::MESSAGE_TYPE, UpdateStateResponse::ESTIMATED_SIZE);
 }
-uint16_t APIConnection::try_send_update_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_update_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *update = static_cast<update::UpdateEntity *>(entity);
 UpdateStateResponse resp;
 resp.missing_state = !update->has_state();
@@ -1478,15 +1411,13 @@ uint16_t APIConnection::try_send_update_state(EntityBase *entity, APIConnection
 resp.release_summary = StringRef(update->update_info.summary);
 resp.release_url = StringRef(update->update_info.release_url);
 }
-return fill_and_encode_entity_state(update, resp, UpdateStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
+return fill_and_encode_entity_state(update, resp, UpdateStateResponse::MESSAGE_TYPE, conn, remaining_size);
 }
-uint16_t APIConnection::try_send_update_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_update_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 auto *update = static_cast<update::UpdateEntity *>(entity);
 ListEntitiesUpdateResponse msg;
 msg.device_class = update->get_device_class_ref();
-return fill_and_encode_entity_info(update, msg, ListEntitiesUpdateResponse::MESSAGE_TYPE, conn, remaining_size,
-is_single);
+return fill_and_encode_entity_info(update, msg, ListEntitiesUpdateResponse::MESSAGE_TYPE, conn, remaining_size);
 }
 void APIConnection::update_command(const UpdateCommandRequest &msg) {
 ENTITY_COMMAND_GET(update::UpdateEntity, update, update)
@@ -1512,7 +1443,7 @@ bool APIConnection::try_send_log_message(int level, const char *tag, const char
 SubscribeLogsResponse msg;
 msg.level = static_cast<enums::LogLevel>(level);
 msg.set_message(reinterpret_cast<const uint8_t *>(line), message_len);
-return this->send_message_(msg, SubscribeLogsResponse::MESSAGE_TYPE);
+return this->send_message_impl(msg, SubscribeLogsResponse::MESSAGE_TYPE);
 }

 void APIConnection::complete_authentication_() {
@@ -1712,17 +1643,16 @@ void APIConnection::on_home_assistant_state_response(const HomeAssistantStateRes
 }

 // Create null-terminated state for callback (parse_number needs null-termination)
-// HA state max length is 255, so 256 byte buffer covers all cases
-char state_buf[256];
-size_t copy_len = msg.state.size();
-if (copy_len >= sizeof(state_buf)) {
-copy_len = sizeof(state_buf) - 1; // Truncate to leave space for null terminator
+// HA state max length is 255 characters, but attributes can be much longer
+// Use stack buffer for common case (states), heap fallback for large attributes
+size_t state_len = msg.state.size();
+SmallBufferWithHeapFallback<MAX_STATE_LEN + 1> state_buf_alloc(state_len + 1);
+char *state_buf = reinterpret_cast<char *>(state_buf_alloc.get());
+if (state_len > 0) {
+memcpy(state_buf, msg.state.c_str(), state_len);
 }
-if (copy_len > 0) {
-memcpy(state_buf, msg.state.c_str(), copy_len);
-}
-state_buf[copy_len] = '\0';
-it.callback(StringRef(state_buf, copy_len));
+state_buf[state_len] = '\0';
+it.callback(StringRef(state_buf, state_len));
 }
 }
 #endif
@@ -1838,6 +1768,14 @@ bool APIConnection::try_to_clear_buffer(bool log_out_of_space) {
 }
 return false;
 }
+bool APIConnection::send_message_impl(const ProtoMessage &msg, uint8_t message_type) {
+ProtoSize size;
+msg.calculate_size(size);
+std::vector<uint8_t> &shared_buf = this->parent_->get_shared_buffer_ref();
+this->prepare_first_message_buffer(shared_buf, size.get_size());
+msg.encode({&shared_buf});
+return this->send_buffer({&shared_buf}, message_type);
+}
 bool APIConnection::send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) {
 const bool is_log_message = (message_type == SubscribeLogsResponse::MESSAGE_TYPE);

@@ -1845,23 +1783,8 @@ bool APIConnection::send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) {
 return false;
 }

-// Toggle Nagle's algorithm based on message type to prevent log messages from
-// filling the TCP send buffer and crowding out important state updates.
-//
-// This honors the `no_delay` proto option - SubscribeLogsResponse is the only
-// message with `option (no_delay) = false;` in api.proto, indicating it should
-// allow Nagle coalescing. This option existed since 2019 but was never implemented.
-//
-// - Log messages: Enable Nagle (NODELAY=false) so small log packets coalesce
-// into fewer, larger packets. They flush naturally via TCP delayed ACK timer
-// (~200ms), buffer filling, or when a state update triggers a flush.
-//
-// - All other messages (state updates, responses): Disable Nagle (NODELAY=true)
-// for immediate delivery. These are time-sensitive and should not be delayed.
-//
-// This must be done proactively BEFORE the buffer fills up - checking buffer
-// state here would be too late since we'd already be in a degraded state.
-this->helper_->set_nodelay(!is_log_message);
+// Set TCP_NODELAY based on message type - see set_nodelay_for_message() for details
+this->helper_->set_nodelay_for_message(is_log_message);

 APIError err = this->helper_->write_protobuf_packet(message_type, buffer);
 if (err == APIError::WOULD_BLOCK)
@@ -1913,6 +1836,23 @@ void APIConnection::DeferredBatch::add_item_front(EntityBase *entity, uint8_t me
 }
 }

+bool APIConnection::send_message_smart_(EntityBase *entity, uint8_t message_type, uint8_t estimated_size,
+uint8_t aux_data_index) {
+if (this->should_send_immediately_(message_type) && this->helper_->can_write_without_blocking()) {
+auto &shared_buf = this->parent_->get_shared_buffer_ref();
+this->prepare_first_message_buffer(shared_buf, estimated_size);
+DeferredBatch::BatchItem item{entity, message_type, estimated_size, aux_data_index};
+if (this->dispatch_message_(item, MAX_BATCH_PACKET_SIZE, true) &&
+this->send_buffer(ProtoWriteBuffer{&shared_buf}, message_type)) {
+#ifdef HAS_PROTO_MESSAGE_DUMP
+this->log_batch_item_(item);
+#endif
+return true;
+}
+}
+return this->schedule_message_(entity, message_type, estimated_size, aux_data_index);
+}

 bool APIConnection::schedule_batch_() {
 if (!this->flags_.batch_scheduled) {
 this->flags_.batch_scheduled = true;
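The send_message_smart_ function added in the hunk above follows a common "send now, or defer to the batch" pattern: encode straight into the shared buffer while the socket can still accept data, and only queue the item for the scheduled batch flush when it cannot. A minimal, self-contained C++ sketch of that pattern, using hypothetical names rather than the ESPHome API:

#include <cstdint>
#include <vector>

// Illustration only: immediate send with fallback to a deferred queue.
struct Item {
  uint8_t type;
  uint8_t estimated_size;
};

class Sender {
 public:
  bool send_smart(const Item &item) {
    // Fast path: the socket can take data right now, so encode into the shared
    // buffer and write it out immediately instead of waiting for the batch timer.
    if (this->can_write_now_()) {
      buf_.clear();
      buf_.reserve(item.estimated_size);
      this->encode_(item, buf_);
      return this->write_(buf_);
    }
    // Slow path: queue the item; a later batch flush picks it up.
    pending_.push_back(item);
    return true;
  }

 private:
  bool can_write_now_() const { return true; }                 // stand-in for a socket readiness check
  void encode_(const Item &, std::vector<uint8_t> &) const {}  // stand-in for message encoding
  bool write_(const std::vector<uint8_t> &) const { return true; }
  std::vector<uint8_t> buf_;
  std::vector<Item> pending_;
};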
@@ -1941,10 +1881,21 @@ void APIConnection::process_batch_() {
 auto &shared_buf = this->parent_->get_shared_buffer_ref();
 size_t num_items = this->deferred_batch_.size();

-// Fast path for single message - allocate exact size needed
+// Cache these values to avoid repeated virtual calls
+const uint8_t header_padding = this->helper_->frame_header_padding();
+const uint8_t footer_size = this->helper_->frame_footer_size();
+
+// Pre-calculate exact buffer size needed based on message types
+uint32_t total_estimated_size = num_items * (header_padding + footer_size);
+for (size_t i = 0; i < num_items; i++) {
+total_estimated_size += this->deferred_batch_[i].estimated_size;
+}
+
+this->prepare_first_message_buffer(shared_buf, header_padding, total_estimated_size);
+
+// Fast path for single message - buffer already allocated above
 if (num_items == 1) {
 const auto &item = this->deferred_batch_[0];

 // Let dispatch_message_ calculate size and encode if it fits
 uint16_t payload_size = this->dispatch_message_(item, std::numeric_limits<uint16_t>::max(), true);

@@ -1967,30 +1918,8 @@ void APIConnection::process_batch_() {
 // Stack-allocated array for message info
 alignas(MessageInfo) char message_info_storage[MAX_MESSAGES_PER_BATCH * sizeof(MessageInfo)];
 MessageInfo *message_info = reinterpret_cast<MessageInfo *>(message_info_storage);
-size_t message_count = 0;
-
-// Cache these values to avoid repeated virtual calls
-const uint8_t header_padding = this->helper_->frame_header_padding();
-const uint8_t footer_size = this->helper_->frame_footer_size();
-
-// Initialize buffer and tracking variables
-shared_buf.clear();
-
-// Pre-calculate exact buffer size needed based on message types
-uint32_t total_estimated_size = num_items * (header_padding + footer_size);
-for (size_t i = 0; i < this->deferred_batch_.size(); i++) {
-const auto &item = this->deferred_batch_[i];
-total_estimated_size += item.estimated_size;
-}
-
-// Calculate total overhead for all messages
-// Reserve based on estimated size (much more accurate than 24-byte worst-case)
-shared_buf.reserve(total_estimated_size);
-this->flags_.batch_first_message = true;

 size_t items_processed = 0;
 uint16_t remaining_size = std::numeric_limits<uint16_t>::max();

 // Track where each message's header padding begins in the buffer
 // For plaintext: this is where the 6-byte header padding starts
 // For noise: this is where the 7-byte header padding starts
@@ -2002,7 +1931,7 @@
 const auto &item = this->deferred_batch_[i];
 // Try to encode message via dispatch
 // The dispatch function calculates overhead to determine if the message fits
-uint16_t payload_size = this->dispatch_message_(item, remaining_size, false);
+uint16_t payload_size = this->dispatch_message_(item, remaining_size, i == 0);

 if (payload_size == 0) {
 // Message won't fit, stop processing
@@ -2016,10 +1945,7 @@
 // This avoids default-constructing all MAX_MESSAGES_PER_BATCH elements
 // Explicit destruction is not needed because MessageInfo is trivially destructible,
 // as ensured by the static_assert in its definition.
-new (&message_info[message_count++]) MessageInfo(item.message_type, current_offset, proto_payload_size);
+new (&message_info[items_processed++]) MessageInfo(item.message_type, current_offset, proto_payload_size);

-// Update tracking variables
-items_processed++;
 // After first message, set remaining size to MAX_BATCH_PACKET_SIZE to avoid fragmentation
 if (items_processed == 1) {
 remaining_size = MAX_BATCH_PACKET_SIZE;
@@ -2042,7 +1968,7 @@

 // Send all collected messages
 APIError err = this->helper_->write_protobuf_messages(ProtoWriteBuffer{&shared_buf},
-std::span<const MessageInfo>(message_info, message_count));
+std::span<const MessageInfo>(message_info, items_processed));
 if (err != APIError::OK && err != APIError::WOULD_BLOCK) {
 this->fatal_error_with_log_(LOG_STR("Batch write failed"), err);
 }
@@ -2071,7 +1997,8 @@
 // Dispatch message encoding based on message_type
 // Switch assigns function pointer, single call site for smaller code size
 uint16_t APIConnection::dispatch_message_(const DeferredBatch::BatchItem &item, uint32_t remaining_size,
-bool is_single) {
+bool batch_first) {
+this->flags_.batch_first_message = batch_first;
 #ifdef USE_EVENT
 // Events need aux_data_index to look up event type from entity
 if (item.message_type == EventResponse::MESSAGE_TYPE) {
@@ -2080,7 +2007,7 @@ uint16_t APIConnection::dispatch_message_(const DeferredBatch::BatchItem &item,
 return 0;
 auto *event = static_cast<event::Event *>(item.entity);
 return try_send_event_response(event, StringRef::from_maybe_nullptr(event->get_event_type(item.aux_data_index)),
-this, remaining_size, is_single);
+this, remaining_size);
 }
 #endif

@@ -2190,25 +2117,22 @@ uint16_t APIConnection::dispatch_message_(const DeferredBatch::BatchItem &item,
 #undef CASE_STATE_INFO
 #undef CASE_INFO_ONLY

-return func(item.entity, this, remaining_size, is_single);
+return func(item.entity, this, remaining_size);
 }

-uint16_t APIConnection::try_send_list_info_done(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_list_info_done(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 ListEntitiesDoneResponse resp;
-return encode_message_to_buffer(resp, ListEntitiesDoneResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
+return encode_message_to_buffer(resp, ListEntitiesDoneResponse::MESSAGE_TYPE, conn, remaining_size);
 }

-uint16_t APIConnection::try_send_disconnect_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_disconnect_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 DisconnectRequest req;
-return encode_message_to_buffer(req, DisconnectRequest::MESSAGE_TYPE, conn, remaining_size, is_single);
+return encode_message_to_buffer(req, DisconnectRequest::MESSAGE_TYPE, conn, remaining_size);
 }

-uint16_t APIConnection::try_send_ping_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single) {
+uint16_t APIConnection::try_send_ping_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size) {
 PingRequest req;
-return encode_message_to_buffer(req, PingRequest::MESSAGE_TYPE, conn, remaining_size, is_single);
+return encode_message_to_buffer(req, PingRequest::MESSAGE_TYPE, conn, remaining_size);
 }

 #ifdef USE_API_HOMEASSISTANT_STATES
@@ -170,7 +170,7 @@ class APIConnection final : public APIServerConnection {

 #ifdef USE_WATER_HEATER
 bool send_water_heater_state(water_heater::WaterHeater *water_heater);
-void on_water_heater_command_request(const WaterHeaterCommandRequest &msg) override;
+void water_heater_command(const WaterHeaterCommandRequest &msg) override;
 #endif

 #ifdef USE_IR_RF
@@ -255,17 +255,7 @@ class APIConnection final : public APIServerConnection {

 void on_fatal_error() override;
 void on_no_setup_connection() override;
-ProtoWriteBuffer create_buffer(uint32_t reserve_size) override {
-// FIXME: ensure no recursive writes can happen
-
-// Get header padding size - used for both reserve and insert
-uint8_t header_padding = this->helper_->frame_header_padding();
-// Get shared buffer from parent server
-std::vector<uint8_t> &shared_buf = this->parent_->get_shared_buffer_ref();
-this->prepare_first_message_buffer(shared_buf, header_padding,
-reserve_size + header_padding + this->helper_->frame_footer_size());
-return {&shared_buf};
-}
+bool send_message_impl(const ProtoMessage &msg, uint8_t message_type) override;

 void prepare_first_message_buffer(std::vector<uint8_t> &shared_buf, size_t header_padding, size_t total_size) {
 shared_buf.clear();
@@ -277,6 +267,13 @@ class APIConnection final : public APIServerConnection {
 shared_buf.resize(header_padding);
 }

+// Convenience overload - computes frame overhead internally
+void prepare_first_message_buffer(std::vector<uint8_t> &shared_buf, size_t payload_size) {
+const uint8_t header_padding = this->helper_->frame_header_padding();
+const uint8_t footer_size = this->helper_->frame_footer_size();
+this->prepare_first_message_buffer(shared_buf, header_padding, payload_size + header_padding + footer_size);
+}

 bool try_to_clear_buffer(bool log_out_of_space);
 bool send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) override;

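The convenience overload added just above folds the frame header padding and footer into the reserve size so call sites only pass the payload size. As a rough, self-contained illustration of this "reserve padding first, append the payload, fill the header in place" buffer layout, with assumed sizes and names rather than the actual ESPHome frame format:

#include <cstdint>
#include <cstring>
#include <vector>

// Illustration only: leave room for a frame header up front so the payload can be
// appended once and the header written in place afterwards, avoiding a second copy.
void build_frame(std::vector<uint8_t> &buf, const uint8_t *payload, size_t len) {
  constexpr size_t kHeaderPadding = 6;  // assumed header size for this sketch
  constexpr size_t kFooterSize = 0;     // assumed footer size for this sketch
  buf.clear();
  buf.reserve(kHeaderPadding + len + kFooterSize);
  buf.resize(kHeaderPadding);                     // placeholder bytes for the header
  buf.insert(buf.end(), payload, payload + len);  // payload goes right after the padding
  buf[0] = 0x00;                                  // header fields filled in place (dummy values)
  buf[1] = static_cast<uint8_t>(len & 0xFF);
}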
@@ -298,21 +295,21 @@ class APIConnection final : public APIServerConnection {

 // Non-template helper to encode any ProtoMessage
 static uint16_t encode_message_to_buffer(ProtoMessage &msg, uint8_t message_type, APIConnection *conn,
-uint32_t remaining_size, bool is_single);
+uint32_t remaining_size);

 // Helper to fill entity state base and encode message
 static uint16_t fill_and_encode_entity_state(EntityBase *entity, StateResponseProtoMessage &msg, uint8_t message_type,
-APIConnection *conn, uint32_t remaining_size, bool is_single) {
+APIConnection *conn, uint32_t remaining_size) {
 msg.key = entity->get_object_id_hash();
 #ifdef USE_DEVICES
 msg.device_id = entity->get_device_id();
 #endif
-return encode_message_to_buffer(msg, message_type, conn, remaining_size, is_single);
+return encode_message_to_buffer(msg, message_type, conn, remaining_size);
 }

 // Helper to fill entity info base and encode message
 static uint16_t fill_and_encode_entity_info(EntityBase *entity, InfoResponseProtoMessage &msg, uint8_t message_type,
-APIConnection *conn, uint32_t remaining_size, bool is_single) {
+APIConnection *conn, uint32_t remaining_size) {
 // Set common fields that are shared by all entity types
 msg.key = entity->get_object_id_hash();

@@ -339,7 +336,7 @@ class APIConnection final : public APIServerConnection {
 #ifdef USE_DEVICES
 msg.device_id = entity->get_device_id();
 #endif
-return encode_message_to_buffer(msg, message_type, conn, remaining_size, is_single);
+return encode_message_to_buffer(msg, message_type, conn, remaining_size);
 }

 #ifdef USE_VOICE_ASSISTANT
@@ -370,141 +367,108 @@ class APIConnection final : public APIServerConnection {
 }

 #ifdef USE_BINARY_SENSOR
-static uint16_t try_send_binary_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
-static uint16_t try_send_binary_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
+static uint16_t try_send_binary_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_binary_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_COVER
-static uint16_t try_send_cover_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
-static uint16_t try_send_cover_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
+static uint16_t try_send_cover_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_cover_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_FAN
-static uint16_t try_send_fan_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
-static uint16_t try_send_fan_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
+static uint16_t try_send_fan_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_fan_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_LIGHT
-static uint16_t try_send_light_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
-static uint16_t try_send_light_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
+static uint16_t try_send_light_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_light_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_SENSOR
-static uint16_t try_send_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
-static uint16_t try_send_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
+static uint16_t try_send_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_SWITCH
-static uint16_t try_send_switch_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
-static uint16_t try_send_switch_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
+static uint16_t try_send_switch_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_switch_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_TEXT_SENSOR
-static uint16_t try_send_text_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
-static uint16_t try_send_text_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
+static uint16_t try_send_text_sensor_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_text_sensor_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_CLIMATE
-static uint16_t try_send_climate_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
-static uint16_t try_send_climate_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
+static uint16_t try_send_climate_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_climate_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_NUMBER
-static uint16_t try_send_number_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
-static uint16_t try_send_number_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
+static uint16_t try_send_number_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_number_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_DATETIME_DATE
-static uint16_t try_send_date_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
-static uint16_t try_send_date_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
+static uint16_t try_send_date_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_date_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_DATETIME_TIME
-static uint16_t try_send_time_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
-static uint16_t try_send_time_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
+static uint16_t try_send_time_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_time_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_DATETIME_DATETIME
-static uint16_t try_send_datetime_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
-static uint16_t try_send_datetime_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
+static uint16_t try_send_datetime_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_datetime_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_TEXT
-static uint16_t try_send_text_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
-static uint16_t try_send_text_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
+static uint16_t try_send_text_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_text_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_SELECT
-static uint16_t try_send_select_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
-static uint16_t try_send_select_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
+static uint16_t try_send_select_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_select_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_BUTTON
-static uint16_t try_send_button_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
-bool is_single);
+static uint16_t try_send_button_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_LOCK
-static uint16_t try_send_lock_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
-static uint16_t try_send_lock_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
+static uint16_t try_send_lock_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
+static uint16_t try_send_lock_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
 #endif
 #ifdef USE_VALVE
-static uint16_t try_send_valve_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
static uint16_t try_send_valve_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
bool is_single);
|
static uint16_t try_send_valve_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
static uint16_t try_send_valve_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
|
|
||||||
#endif
|
#endif
|
||||||
#ifdef USE_MEDIA_PLAYER
|
#ifdef USE_MEDIA_PLAYER
|
||||||
static uint16_t try_send_media_player_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
static uint16_t try_send_media_player_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
bool is_single);
|
static uint16_t try_send_media_player_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
static uint16_t try_send_media_player_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
|
||||||
bool is_single);
|
|
||||||
#endif
|
#endif
|
||||||
#ifdef USE_ALARM_CONTROL_PANEL
|
#ifdef USE_ALARM_CONTROL_PANEL
|
||||||
static uint16_t try_send_alarm_control_panel_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
static uint16_t try_send_alarm_control_panel_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
bool is_single);
|
static uint16_t try_send_alarm_control_panel_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
static uint16_t try_send_alarm_control_panel_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
|
||||||
bool is_single);
|
|
||||||
#endif
|
#endif
|
||||||
#ifdef USE_WATER_HEATER
|
#ifdef USE_WATER_HEATER
|
||||||
static uint16_t try_send_water_heater_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
static uint16_t try_send_water_heater_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
bool is_single);
|
static uint16_t try_send_water_heater_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
static uint16_t try_send_water_heater_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
|
||||||
bool is_single);
|
|
||||||
#endif
|
#endif
|
||||||
#ifdef USE_INFRARED
|
#ifdef USE_INFRARED
|
||||||
static uint16_t try_send_infrared_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
static uint16_t try_send_infrared_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
bool is_single);
|
|
||||||
#endif
|
#endif
|
||||||
#ifdef USE_EVENT
|
#ifdef USE_EVENT
|
||||||
static uint16_t try_send_event_response(event::Event *event, StringRef event_type, APIConnection *conn,
|
static uint16_t try_send_event_response(event::Event *event, StringRef event_type, APIConnection *conn,
|
||||||
uint32_t remaining_size, bool is_single);
|
uint32_t remaining_size);
|
||||||
static uint16_t try_send_event_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size, bool is_single);
|
static uint16_t try_send_event_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
#endif
|
#endif
|
||||||
#ifdef USE_UPDATE
|
#ifdef USE_UPDATE
|
||||||
static uint16_t try_send_update_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
static uint16_t try_send_update_state(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
bool is_single);
|
static uint16_t try_send_update_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
static uint16_t try_send_update_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
|
||||||
bool is_single);
|
|
||||||
#endif
|
#endif
|
||||||
#ifdef USE_CAMERA
|
#ifdef USE_CAMERA
|
||||||
static uint16_t try_send_camera_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
static uint16_t try_send_camera_info(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
bool is_single);
|
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
// Method for ListEntitiesDone batching
|
// Method for ListEntitiesDone batching
|
||||||
static uint16_t try_send_list_info_done(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
static uint16_t try_send_list_info_done(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
bool is_single);
|
|
||||||
|
|
||||||
// Method for DisconnectRequest batching
|
// Method for DisconnectRequest batching
|
||||||
static uint16_t try_send_disconnect_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
static uint16_t try_send_disconnect_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
bool is_single);
|
|
||||||
|
|
||||||
// Batch message method for ping requests
|
// Batch message method for ping requests
|
||||||
static uint16_t try_send_ping_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size,
|
static uint16_t try_send_ping_request(EntityBase *entity, APIConnection *conn, uint32_t remaining_size);
|
||||||
bool is_single);
|
|
||||||
|
|
||||||
// === Optimal member ordering for 32-bit systems ===
|
// === Optimal member ordering for 32-bit systems ===
|
||||||
|
|
||||||
@@ -539,7 +503,7 @@ class APIConnection final : public APIServerConnection {
 #endif

   // Function pointer type for message encoding
-  using MessageCreatorPtr = uint16_t (*)(EntityBase *, APIConnection *, uint32_t remaining_size, bool is_single);
+  using MessageCreatorPtr = uint16_t (*)(EntityBase *, APIConnection *, uint32_t remaining_size);

   // Generic batching mechanism for both state updates and entity info
   struct DeferredBatch {
@@ -652,7 +616,7 @@ class APIConnection final : public APIServerConnection {

   // Dispatch message encoding based on message_type - replaces function pointer storage
   // Switch assigns pointer, single call site for smaller code size
-  uint16_t dispatch_message_(const DeferredBatch::BatchItem &item, uint32_t remaining_size, bool is_single);
+  uint16_t dispatch_message_(const DeferredBatch::BatchItem &item, uint32_t remaining_size, bool batch_first);

 #ifdef HAS_PROTO_MESSAGE_DUMP
   void log_batch_item_(const DeferredBatch::BatchItem &item) {
@@ -684,19 +648,7 @@ class APIConnection final : public APIServerConnection {
   // Tries immediate send if should_send_immediately_() returns true and buffer has space
   // Falls back to batching if immediate send fails or isn't applicable
   bool send_message_smart_(EntityBase *entity, uint8_t message_type, uint8_t estimated_size,
-                           uint8_t aux_data_index = DeferredBatch::AUX_DATA_UNUSED) {
-    if (this->should_send_immediately_(message_type) && this->helper_->can_write_without_blocking()) {
-      DeferredBatch::BatchItem item{entity, message_type, estimated_size, aux_data_index};
-      if (this->dispatch_message_(item, MAX_BATCH_PACKET_SIZE, true) &&
-          this->send_buffer(ProtoWriteBuffer{&this->parent_->get_shared_buffer_ref()}, message_type)) {
-#ifdef HAS_PROTO_MESSAGE_DUMP
-        this->log_batch_item_(item);
-#endif
-        return true;
-      }
-    }
-    return this->schedule_message_(entity, message_type, estimated_size, aux_data_index);
-  }
+                           uint8_t aux_data_index = DeferredBatch::AUX_DATA_UNUSED);

   // Helper function to schedule a deferred message with known message type
   bool schedule_message_(EntityBase *entity, uint8_t message_type, uint8_t estimated_size,
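For context on what the now out-of-line send_message_smart_() declaration still has to do, here is a minimal sketch of the try-immediate-then-batch decision shown in the removed inline body. The std::function hooks stand in for esphome internals (should_send_immediately_, dispatch_message_, send_buffer, schedule_message_) and are not the real API.

```cpp
#include <cstdio>
#include <functional>

// Minimal model of the decision the removed send_message_smart_ body makes.
struct SmartSender {
  std::function<bool()> can_send_now;     // stands in for should_send_immediately_() && can_write_without_blocking()
  std::function<bool()> encode_and_send;  // stands in for dispatch_message_() followed by send_buffer()
  std::function<bool()> schedule;         // stands in for schedule_message_() (deferred batch)

  bool send_smart() {
    // Fast path: encode straight into the shared buffer and send it now.
    if (can_send_now() && encode_and_send())
      return true;
    // Otherwise queue the item so it goes out with the next batch.
    return schedule();
  }
};

int main() {
  SmartSender s{[] { return false; },  // pretend the socket is busy
                [] { return true; },
                [] { std::puts("queued for batch"); return true; }};
  return s.send_smart() ? 0 : 1;
}
```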
@@ -120,26 +120,39 @@ class APIFrameHelper {
     }
     return APIError::OK;
   }
-  /// Toggle TCP_NODELAY socket option to control Nagle's algorithm.
-  ///
-  /// This is used to allow log messages to coalesce (Nagle enabled) while keeping
-  /// state updates low-latency (NODELAY enabled). Without this, many small log
-  /// packets fill the TCP send buffer, crowding out important state updates.
-  ///
-  /// State is tracked to minimize setsockopt() overhead - on lwip_raw (ESP8266/RP2040)
-  /// this is just a boolean assignment; on other platforms it's a lightweight syscall.
-  ///
-  /// @param enable true to enable NODELAY (disable Nagle), false to enable Nagle
-  /// @return true if successful or already in desired state
-  bool set_nodelay(bool enable) {
-    if (this->nodelay_enabled_ == enable)
-      return true;
-    int val = enable ? 1 : 0;
-    int err = this->socket_->setsockopt(IPPROTO_TCP, TCP_NODELAY, &val, sizeof(int));
-    if (err == 0) {
-      this->nodelay_enabled_ = enable;
-    }
-    return err == 0;
-  }
+  // Manage TCP_NODELAY (Nagle's algorithm) based on message type.
+  //
+  // For non-log messages (sensor data, state updates): Always disable Nagle
+  // (NODELAY on) for immediate delivery - these are time-sensitive.
+  //
+  // For log messages: Use Nagle to coalesce multiple small log packets into
+  // fewer larger packets, reducing WiFi overhead. However, we limit batching
+  // to 3 messages to avoid excessive LWIP buffer pressure on memory-constrained
+  // devices like ESP8266. LWIP's TCP_OVERSIZE option coalesces the data into
+  // shared pbufs, but holding data too long waiting for Nagle's timer causes
+  // buffer exhaustion and dropped messages.
+  //
+  // Flow: Log 1 (Nagle on) -> Log 2 (Nagle on) -> Log 3 (NODELAY, flush all)
+  //
+  void set_nodelay_for_message(bool is_log_message) {
+    if (!is_log_message) {
+      if (this->nodelay_state_ != NODELAY_ON) {
+        this->set_nodelay_raw_(true);
+        this->nodelay_state_ = NODELAY_ON;
+      }
+      return;
+    }
+
+    // Log messages 1-3: state transitions -1 -> 1 -> 2 -> -1 (flush on 3rd)
+    if (this->nodelay_state_ == NODELAY_ON) {
+      this->set_nodelay_raw_(false);
+      this->nodelay_state_ = 1;
+    } else if (this->nodelay_state_ >= LOG_NAGLE_COUNT) {
+      this->set_nodelay_raw_(true);
+      this->nodelay_state_ = NODELAY_ON;
+    } else {
+      this->nodelay_state_++;
+    }
+  }
   virtual APIError write_protobuf_packet(uint8_t type, ProtoWriteBuffer buffer) = 0;
   // Write multiple protobuf messages in a single operation
@@ -229,10 +242,18 @@ class APIFrameHelper {
   uint8_t tx_buf_head_{0};
   uint8_t tx_buf_tail_{0};
   uint8_t tx_buf_count_{0};
-  // Tracks TCP_NODELAY state to minimize setsockopt() calls. Initialized to true
-  // since init_common_() enables NODELAY. Used by set_nodelay() to allow log
-  // messages to coalesce while keeping state updates low-latency.
-  bool nodelay_enabled_{true};
+  // Nagle batching state for log messages. NODELAY_ON (-1) means NODELAY is enabled
+  // (immediate send). Values 1-2 count log messages in the current Nagle batch.
+  // After LOG_NAGLE_COUNT logs, we switch to NODELAY to flush and reset.
+  static constexpr int8_t NODELAY_ON = -1;
+  static constexpr int8_t LOG_NAGLE_COUNT = 2;
+  int8_t nodelay_state_{NODELAY_ON};
+
+  // Internal helper to set TCP_NODELAY socket option
+  void set_nodelay_raw_(bool enable) {
+    int val = enable ? 1 : 0;
+    this->socket_->setsockopt(IPPROTO_TCP, TCP_NODELAY, &val, sizeof(int));
+  }
+
   // Common initialization for both plaintext and noise protocols
   APIError init_common_();
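The comment block above describes a small state machine. Below is a self-contained model of those transitions, with NODELAY_ON = -1 and LOG_NAGLE_COUNT = 2 taken from the diff and the setsockopt() side effect replaced by a printf so the sketch runs anywhere.

```cpp
#include <cstdint>
#include <cstdio>

// Standalone model of the nodelay_state_ transitions from the hunk above.
class NodelayModel {
 public:
  void on_message(bool is_log_message) {
    if (!is_log_message) {
      if (state_ != NODELAY_ON) {
        apply_(true);  // state update: flush any pending log data immediately
        state_ = NODELAY_ON;
      }
      return;
    }
    if (state_ == NODELAY_ON) {
      apply_(false);  // first log: enable Nagle so small packets coalesce
      state_ = 1;
    } else if (state_ >= LOG_NAGLE_COUNT) {
      apply_(true);  // third log: re-enable NODELAY, flushing the batch
      state_ = NODELAY_ON;
    } else {
      state_++;  // second log: keep batching
    }
  }

 private:
  static constexpr int8_t NODELAY_ON = -1;
  static constexpr int8_t LOG_NAGLE_COUNT = 2;
  int8_t state_{NODELAY_ON};
  void apply_(bool nodelay) { std::printf("TCP_NODELAY -> %d\n", nodelay ? 1 : 0); }
};

int main() {
  NodelayModel m;
  for (int i = 0; i < 4; i++)
    m.on_message(true);  // logs: Nagle on, batch, flush on the 3rd, Nagle on again
  m.on_message(false);   // state update: NODELAY restored if needed
}
```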
@@ -3,6 +3,7 @@
 #ifdef USE_API_NOISE
 #include "api_connection.h" // For ClientInfo struct
 #include "esphome/core/application.h"
+#include "esphome/core/entity_base.h"
 #include "esphome/core/hal.h"
 #include "esphome/core/helpers.h"
 #include "esphome/core/log.h"
@@ -256,28 +257,30 @@ APIError APINoiseFrameHelper::state_action_() {
   }
   if (state_ == State::SERVER_HELLO) {
     // send server hello
-    constexpr size_t mac_len = 13; // 12 hex chars + null terminator
     const std::string &name = App.get_name();
-    char mac[mac_len];
+    char mac[MAC_ADDRESS_BUFFER_SIZE];
     get_mac_address_into_buffer(mac);

     // Calculate positions and sizes
     size_t name_len = name.size() + 1; // including null terminator
     size_t name_offset = 1;
     size_t mac_offset = name_offset + name_len;
-    size_t total_size = 1 + name_len + mac_len;
+    size_t total_size = 1 + name_len + MAC_ADDRESS_BUFFER_SIZE;

-    auto msg = std::make_unique<uint8_t[]>(total_size);
+    // 1 (proto) + name (max ESPHOME_DEVICE_NAME_MAX_LEN) + 1 (name null)
+    // + mac (MAC_ADDRESS_BUFFER_SIZE - 1) + 1 (mac null)
+    constexpr size_t max_msg_size = 1 + ESPHOME_DEVICE_NAME_MAX_LEN + 1 + MAC_ADDRESS_BUFFER_SIZE;
+    uint8_t msg[max_msg_size];

     // chosen proto
     msg[0] = 0x01;

     // node name, terminated by null byte
-    std::memcpy(msg.get() + name_offset, name.c_str(), name_len);
+    std::memcpy(msg + name_offset, name.c_str(), name_len);
     // node mac, terminated by null byte
-    std::memcpy(msg.get() + mac_offset, mac, mac_len);
+    std::memcpy(msg + mac_offset, mac, MAC_ADDRESS_BUFFER_SIZE);

-    aerr = write_frame_(msg.get(), total_size);
+    aerr = write_frame_(msg, total_size);
     if (aerr != APIError::OK)
       return aerr;

@@ -353,35 +356,32 @@ APIError APINoiseFrameHelper::state_action_() {
   return APIError::OK;
 }
 void APINoiseFrameHelper::send_explicit_handshake_reject_(const LogString *reason) {
+  // Max reject message: "Bad handshake packet len" (24) + 1 (failure byte) = 25 bytes
+  uint8_t data[32];
+  data[0] = 0x01; // failure
+
 #ifdef USE_STORE_LOG_STR_IN_FLASH
   // On ESP8266 with flash strings, we need to use PROGMEM-aware functions
   size_t reason_len = strlen_P(reinterpret_cast<PGM_P>(reason));
-  size_t data_size = reason_len + 1;
-  auto data = std::make_unique<uint8_t[]>(data_size);
-  data[0] = 0x01; // failure
-
-  // Copy error message from PROGMEM
   if (reason_len > 0) {
-    memcpy_P(data.get() + 1, reinterpret_cast<PGM_P>(reason), reason_len);
+    memcpy_P(data + 1, reinterpret_cast<PGM_P>(reason), reason_len);
   }
 #else
   // Normal memory access
   const char *reason_str = LOG_STR_ARG(reason);
   size_t reason_len = strlen(reason_str);
-  size_t data_size = reason_len + 1;
-  auto data = std::make_unique<uint8_t[]>(data_size);
-  data[0] = 0x01; // failure
-
-  // Copy error message in bulk
   if (reason_len > 0) {
-    std::memcpy(data.get() + 1, reason_str, reason_len);
+    // NOLINTNEXTLINE(bugprone-not-null-terminated-result) - binary protocol, not a C string
+    std::memcpy(data + 1, reason_str, reason_len);
   }
 #endif

+  size_t data_size = reason_len + 1;
+
   // temporarily remove failed state
   auto orig_state = state_;
   state_ = State::EXPLICIT_REJECT;
-  write_frame_(data.get(), data_size);
+  write_frame_(data, data_size);
   state_ = orig_state;
 }
 APIError APINoiseFrameHelper::read_packet(ReadPacketBuffer *buffer) {
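The SERVER_HELLO hunk replaces a heap-allocated message with a fixed stack buffer. Here is a sketch of the [proto][name + '\0'][mac + '\0'] layout it writes, using hypothetical size constants in place of MAC_ADDRESS_BUFFER_SIZE and ESPHOME_DEVICE_NAME_MAX_LEN; the helper name and bounds check are illustrative, not the esphome implementation.

```cpp
#include <cstdint>
#include <cstring>
#include <string>

// Hypothetical sizes for illustration only.
constexpr size_t kMacBufSize = 13;   // 12 hex chars + null terminator
constexpr size_t kMaxNameLen = 31;   // stand-in for the device-name limit

// Packs [0x01 proto][node name + '\0'][mac + '\0'] into a caller-owned buffer,
// mirroring the stack-buffer layout used in the hunk above.
// Returns the number of bytes written, or 0 if the message would not fit.
size_t build_server_hello(uint8_t *msg, size_t msg_cap, const std::string &name,
                          const char (&mac)[kMacBufSize]) {
  size_t name_len = name.size() + 1;  // include null terminator
  size_t total_size = 1 + name_len + kMacBufSize;
  if (name.size() > kMaxNameLen || total_size > msg_cap)
    return 0;
  msg[0] = 0x01;                                       // chosen proto
  std::memcpy(msg + 1, name.c_str(), name_len);        // node name, null-terminated
  std::memcpy(msg + 1 + name_len, mac, kMacBufSize);   // node mac, null-terminated
  return total_size;
}

int main() {
  uint8_t msg[1 + kMaxNameLen + 1 + kMacBufSize];
  char mac[kMacBufSize] = "a1b2c3d4e5f6";
  return build_server_hello(msg, sizeof(msg), "livingroom-node", mac) == 0;
}
```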
@@ -23,15 +23,8 @@ static inline void append_field_prefix(DumpBuffer &out, const char *field_name,
   out.append(indent, ' ').append(field_name).append(": ");
 }

-static inline void append_with_newline(DumpBuffer &out, const char *str) {
-  out.append(str);
-  out.append("\n");
-}
-
 static inline void append_uint(DumpBuffer &out, uint32_t value) {
-  char buf[16];
-  snprintf(buf, sizeof(buf), "%" PRIu32, value);
-  out.append(buf);
+  out.set_pos(buf_append_printf(out.data(), DumpBuffer::CAPACITY, out.pos(), "%" PRIu32, value));
 }

 // RAII helper for message dump formatting
@@ -49,31 +42,23 @@ class MessageDumpHelper {

 // Helper functions to reduce code duplication in dump methods
 static void dump_field(DumpBuffer &out, const char *field_name, int32_t value, int indent = 2) {
-  char buffer[64];
   append_field_prefix(out, field_name, indent);
-  snprintf(buffer, 64, "%" PRId32, value);
-  append_with_newline(out, buffer);
+  out.set_pos(buf_append_printf(out.data(), DumpBuffer::CAPACITY, out.pos(), "%" PRId32 "\n", value));
 }

 static void dump_field(DumpBuffer &out, const char *field_name, uint32_t value, int indent = 2) {
-  char buffer[64];
   append_field_prefix(out, field_name, indent);
-  snprintf(buffer, 64, "%" PRIu32, value);
-  append_with_newline(out, buffer);
+  out.set_pos(buf_append_printf(out.data(), DumpBuffer::CAPACITY, out.pos(), "%" PRIu32 "\n", value));
 }

 static void dump_field(DumpBuffer &out, const char *field_name, float value, int indent = 2) {
-  char buffer[64];
   append_field_prefix(out, field_name, indent);
-  snprintf(buffer, 64, "%g", value);
-  append_with_newline(out, buffer);
+  out.set_pos(buf_append_printf(out.data(), DumpBuffer::CAPACITY, out.pos(), "%g\n", value));
 }

 static void dump_field(DumpBuffer &out, const char *field_name, uint64_t value, int indent = 2) {
-  char buffer[64];
   append_field_prefix(out, field_name, indent);
-  snprintf(buffer, 64, "%" PRIu64, value);
-  append_with_newline(out, buffer);
+  out.set_pos(buf_append_printf(out.data(), DumpBuffer::CAPACITY, out.pos(), "%" PRIu64 "\n", value));
 }

 static void dump_field(DumpBuffer &out, const char *field_name, bool value, int indent = 2) {
@@ -112,7 +97,7 @@ static void dump_bytes_field(DumpBuffer &out, const char *field_name, const uint
   char hex_buf[format_hex_pretty_size(160)];
   append_field_prefix(out, field_name, indent);
   format_hex_pretty_to(hex_buf, data, len);
-  append_with_newline(out, hex_buf);
+  out.append(hex_buf).append("\n");
 }

 template<> const char *proto_enum_to_string<enums::EntityCategory>(enums::EntityCategory value) {
@@ -746,6 +746,11 @@ void APIServerConnection::on_update_command_request(const UpdateCommandRequest &
 #ifdef USE_VALVE
 void APIServerConnection::on_valve_command_request(const ValveCommandRequest &msg) { this->valve_command(msg); }
 #endif
+#ifdef USE_WATER_HEATER
+void APIServerConnection::on_water_heater_command_request(const WaterHeaterCommandRequest &msg) {
+  this->water_heater_command(msg);
+}
+#endif
 #ifdef USE_BLUETOOTH_PROXY
 void APIServerConnection::on_subscribe_bluetooth_le_advertisements_request(
     const SubscribeBluetoothLEAdvertisementsRequest &msg) {
@@ -23,7 +23,7 @@ class APIServerConnectionBase : public ProtoService {
     DumpBuffer dump_buf;
     this->log_send_message_(msg.message_name(), msg.dump_to(dump_buf));
 #endif
-    return this->send_message_(msg, message_type);
+    return this->send_message_impl(msg, message_type);
   }

   virtual void on_hello_request(const HelloRequest &value){};
@@ -303,6 +303,9 @@ class APIServerConnection : public APIServerConnectionBase {
 #ifdef USE_VALVE
   virtual void valve_command(const ValveCommandRequest &msg) = 0;
 #endif
+#ifdef USE_WATER_HEATER
+  virtual void water_heater_command(const WaterHeaterCommandRequest &msg) = 0;
+#endif
 #ifdef USE_BLUETOOTH_PROXY
   virtual void subscribe_bluetooth_le_advertisements(const SubscribeBluetoothLEAdvertisementsRequest &msg) = 0;
 #endif
@@ -432,6 +435,9 @@ class APIServerConnection : public APIServerConnectionBase {
 #ifdef USE_VALVE
   void on_valve_command_request(const ValveCommandRequest &msg) override;
 #endif
+#ifdef USE_WATER_HEATER
+  void on_water_heater_command_request(const WaterHeaterCommandRequest &msg) override;
+#endif
 #ifdef USE_BLUETOOTH_PROXY
   void on_subscribe_bluetooth_le_advertisements_request(const SubscribeBluetoothLEAdvertisementsRequest &msg) override;
 #endif
@@ -211,7 +211,7 @@ void APIServer::loop() {

 #ifdef USE_API_CLIENT_DISCONNECTED_TRIGGER
       // Fire trigger after client is removed so api.connected reflects the true state
-      this->client_disconnected_trigger_->trigger(client_name, client_peername);
+      this->client_disconnected_trigger_.trigger(client_name, client_peername);
 #endif
       // Don't increment client_index since we need to process the swapped element
     }
@@ -227,12 +227,10 @@ class APIServer : public Component,
 #endif

 #ifdef USE_API_CLIENT_CONNECTED_TRIGGER
-  Trigger<std::string, std::string> *get_client_connected_trigger() const { return this->client_connected_trigger_; }
+  Trigger<std::string, std::string> *get_client_connected_trigger() { return &this->client_connected_trigger_; }
 #endif
 #ifdef USE_API_CLIENT_DISCONNECTED_TRIGGER
-  Trigger<std::string, std::string> *get_client_disconnected_trigger() const {
-    return this->client_disconnected_trigger_;
-  }
+  Trigger<std::string, std::string> *get_client_disconnected_trigger() { return &this->client_disconnected_trigger_; }
 #endif

 protected:
@@ -253,10 +251,10 @@ class APIServer : public Component,
   // Pointers and pointer-like types first (4 bytes each)
   std::unique_ptr<socket::Socket> socket_ = nullptr;
 #ifdef USE_API_CLIENT_CONNECTED_TRIGGER
-  Trigger<std::string, std::string> *client_connected_trigger_ = new Trigger<std::string, std::string>();
+  Trigger<std::string, std::string> client_connected_trigger_;
 #endif
 #ifdef USE_API_CLIENT_DISCONNECTED_TRIGGER
-  Trigger<std::string, std::string> *client_disconnected_trigger_ = new Trigger<std::string, std::string>();
+  Trigger<std::string, std::string> client_disconnected_trigger_;
 #endif

   // 4-byte aligned types
@@ -136,12 +136,10 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts
   void set_wants_response() { this->flags_.wants_response = true; }

 #ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
-  Trigger<JsonObjectConst, Ts...> *get_success_trigger_with_response() const {
-    return this->success_trigger_with_response_;
-  }
+  Trigger<JsonObjectConst, Ts...> *get_success_trigger_with_response() { return &this->success_trigger_with_response_; }
 #endif
-  Trigger<Ts...> *get_success_trigger() const { return this->success_trigger_; }
-  Trigger<std::string, Ts...> *get_error_trigger() const { return this->error_trigger_; }
+  Trigger<Ts...> *get_success_trigger() { return &this->success_trigger_; }
+  Trigger<std::string, Ts...> *get_error_trigger() { return &this->error_trigger_; }
 #endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES

   void play(const Ts &...x) override {
@@ -187,14 +185,14 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts
           if (response.is_success()) {
 #ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
             if (this->flags_.wants_response) {
-              this->success_trigger_with_response_->trigger(response.get_json(), args...);
+              this->success_trigger_with_response_.trigger(response.get_json(), args...);
             } else
 #endif
             {
-              this->success_trigger_->trigger(args...);
+              this->success_trigger_.trigger(args...);
             }
           } else {
-            this->error_trigger_->trigger(response.get_error_message(), args...);
+            this->error_trigger_.trigger(response.get_error_message(), args...);
           }
         },
         captured_args);
@@ -251,10 +249,10 @@ template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts
 #ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
 #ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
   TemplatableStringValue<Ts...> response_template_{""};
-  Trigger<JsonObjectConst, Ts...> *success_trigger_with_response_ = new Trigger<JsonObjectConst, Ts...>();
+  Trigger<JsonObjectConst, Ts...> success_trigger_with_response_;
 #endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
-  Trigger<Ts...> *success_trigger_ = new Trigger<Ts...>();
-  Trigger<std::string, Ts...> *error_trigger_ = new Trigger<std::string, Ts...>();
+  Trigger<Ts...> success_trigger_;
+  Trigger<std::string, Ts...> error_trigger_;
 #endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES

   struct Flags {
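Both the APIServer and HomeAssistantServiceCallAction hunks apply the same pattern: triggers move from new-allocated pointers to by-value members, and getters hand out the member's address so callers that expect a Trigger* keep working. A minimal before/after sketch with a stand-in Trigger type (not esphome's real class):

```cpp
#include <string>

// Stand-in for esphome's Trigger<> to keep the sketch self-contained.
template<typename... Ts> struct Trigger {
  void trigger(const Ts &...args) { /* fire attached automations */ }
};

// Before: the trigger was heap-allocated once per object and never freed.
class ActionBefore {
 public:
  Trigger<std::string> *get_error_trigger() const { return this->error_trigger_; }
 protected:
  Trigger<std::string> *error_trigger_ = new Trigger<std::string>();
};

// After: the trigger lives inside the object; the getter returns its address,
// so the generated code that stores a Trigger* is unchanged.
class ActionAfter {
 public:
  Trigger<std::string> *get_error_trigger() { return &this->error_trigger_; }
 protected:
  Trigger<std::string> error_trigger_;
};

int main() {
  ActionAfter a;
  a.get_error_trigger()->trigger("boom");
}
```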
@@ -402,6 +402,20 @@ class DumpBuffer {
   const char *c_str() const { return buf_; }
   size_t size() const { return pos_; }

+  /// Get writable buffer pointer for use with buf_append_printf
+  char *data() { return buf_; }
+  /// Get current position for use with buf_append_printf
+  size_t pos() const { return pos_; }
+  /// Update position after buf_append_printf call
+  void set_pos(size_t pos) {
+    if (pos >= CAPACITY) {
+      pos_ = CAPACITY - 1;
+    } else {
+      pos_ = pos;
+    }
+    buf_[pos_] = '\0';
+  }
+
  private:
   void append_impl_(const char *str, size_t len) {
     size_t space = CAPACITY - 1 - pos_;
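The new data()/pos()/set_pos() accessors exist so callers can printf-format directly into the buffer and then clamp the position. Below is a hypothetical re-creation of that position-tracking append contract; the helper name and behavior here are illustrative, not esphome's actual buf_append_printf.

```cpp
#include <cstdarg>
#include <cstdio>

// Illustrative re-creation of the position-tracking append pattern.
// Returns the new write position, clamped to cap - 1 on truncation.
static size_t sketch_buf_append_printf(char *buf, size_t cap, size_t pos, const char *fmt, ...) {
  if (pos >= cap)
    return pos;
  va_list args;
  va_start(args, fmt);
  int written = vsnprintf(buf + pos, cap - pos, fmt, args);
  va_end(args);
  if (written < 0)
    return pos;  // encoding error: keep the old position
  size_t new_pos = pos + static_cast<size_t>(written);
  return new_pos < cap ? new_pos : cap - 1;  // clamp, like set_pos() does
}

int main() {
  char buf[32];
  size_t pos = 0;
  pos = sketch_buf_append_printf(buf, sizeof(buf), pos, "value: ");
  pos = sketch_buf_append_printf(buf, sizeof(buf), pos, "%d\n", 42);
  std::printf("%s(pos=%zu)\n", buf, pos);
}
```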
@@ -943,32 +957,16 @@ class ProtoService {
   virtual bool is_connection_setup() = 0;
   virtual void on_fatal_error() = 0;
   virtual void on_no_setup_connection() = 0;
-  /**
-   * Create a buffer with a reserved size.
-   * @param reserve_size The number of bytes to pre-allocate in the buffer. This is a hint
-   * to optimize memory usage and avoid reallocations during encoding.
-   * Implementations should aim to allocate at least this size.
-   * @return A ProtoWriteBuffer object with the reserved size.
-   */
-  virtual ProtoWriteBuffer create_buffer(uint32_t reserve_size) = 0;
   virtual bool send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) = 0;
   virtual void read_message(uint32_t msg_size, uint32_t msg_type, const uint8_t *msg_data) = 0;
-
-  // Optimized method that pre-allocates buffer based on message size
-  bool send_message_(const ProtoMessage &msg, uint8_t message_type) {
-    ProtoSize size;
-    msg.calculate_size(size);
-    uint32_t msg_size = size.get_size();
-
-    // Create a pre-sized buffer
-    auto buffer = this->create_buffer(msg_size);
-
-    // Encode message into the buffer
-    msg.encode(buffer);
-
-    // Send the buffer
-    return this->send_buffer(buffer, message_type);
-  }
+  /**
+   * Send a protobuf message by calculating its size, allocating a buffer, encoding, and sending.
+   * This is the implementation method - callers should use send_message() which adds logging.
+   * @param msg The protobuf message to send.
+   * @param message_type The message type identifier.
+   * @return True if the message was sent successfully, false otherwise.
+   */
+  virtual bool send_message_impl(const ProtoMessage &msg, uint8_t message_type) = 0;

   // Authentication helper methods
   inline bool check_connection_setup_() {
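The removed inline send_message_() shows the flow that the new virtual send_message_impl() still has to provide: compute the encoded size, prepare a right-sized buffer, encode, then hand the bytes to the framer. A sketch of that path using stand-in types for ProtoMessage, ProtoSize and ProtoWriteBuffer (the real ones live in esphome/components/api/proto.h):

```cpp
#include <cstdint>
#include <vector>

// Stand-in types so the sketch compiles outside esphome.
struct ProtoWriteBuffer { std::vector<uint8_t> *data; };
struct ProtoSize { uint32_t total{0}; uint32_t get_size() const { return total; } };
struct ProtoMessage {
  virtual ~ProtoMessage() = default;
  virtual void calculate_size(ProtoSize &size) const = 0;
  virtual void encode(ProtoWriteBuffer buffer) const = 0;
};

// Size first, then a pre-sized buffer, then encode, then transmit.
bool send_message_sketch(const ProtoMessage &msg, uint8_t message_type,
                         std::vector<uint8_t> &shared_buffer,
                         bool (*send_buffer)(ProtoWriteBuffer, uint8_t)) {
  ProtoSize size;
  msg.calculate_size(size);                  // first pass: compute encoded size
  shared_buffer.clear();
  shared_buffer.reserve(size.get_size());    // pre-size to avoid reallocations
  ProtoWriteBuffer buffer{&shared_buffer};
  msg.encode(buffer);                        // second pass: write the bytes
  return send_buffer(buffer, message_type);  // frame + transmit
}

struct EmptyMessage final : ProtoMessage {
  void calculate_size(ProtoSize &size) const override { size.total = 0; }
  void encode(ProtoWriteBuffer) const override {}
};

int main() {
  std::vector<uint8_t> shared;
  EmptyMessage msg;
  return send_message_sketch(msg, 1, shared, [](ProtoWriteBuffer, uint8_t) { return true; }) ? 0 : 1;
}
```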
@@ -10,7 +10,6 @@ class AQISensor : public sensor::Sensor, public Component {
  public:
   void setup() override;
   void dump_config() override;
-  float get_setup_priority() const override { return setup_priority::DATA; }

   void set_pm_2_5_sensor(sensor::Sensor *sensor) { this->pm_2_5_sensor_ = sensor; }
   void set_pm_10_0_sensor(sensor::Sensor *sensor) { this->pm_10_0_sensor_ = sensor; }
@@ -13,14 +13,11 @@ from . import AQI_CALCULATION_TYPE, CONF_CALCULATION_TYPE, aqi_ns
 CODEOWNERS = ["@jasstrong"]
 DEPENDENCIES = ["sensor"]

-UNIT_INDEX = "index"
-
 AQISensor = aqi_ns.class_("AQISensor", sensor.Sensor, cg.Component)

 CONFIG_SCHEMA = (
     sensor.sensor_schema(
         AQISensor,
-        unit_of_measurement=UNIT_INDEX,
         accuracy_decimals=0,
         device_class=DEVICE_CLASS_AQI,
         state_class=STATE_CLASS_MEASUREMENT,
@@ -41,8 +41,6 @@ void AS3935Component::dump_config() {
 #endif
 }

-float AS3935Component::get_setup_priority() const { return setup_priority::DATA; }
-
 void AS3935Component::loop() {
   if (!this->irq_pin_->digital_read())
     return;
@@ -74,7 +74,6 @@ class AS3935Component : public Component {
  public:
   void setup() override;
   void dump_config() override;
-  float get_setup_priority() const override;
   void loop() override;

   void set_irq_pin(GPIOPin *irq_pin) { irq_pin_ = irq_pin; }
@@ -22,8 +22,6 @@ static const uint8_t REGISTER_STATUS = 0x0B; // 8 bytes / R
 static const uint8_t REGISTER_AGC = 0x1A; // 8 bytes / R
 static const uint8_t REGISTER_MAGNITUDE = 0x1B; // 16 bytes / R

-float AS5600Sensor::get_setup_priority() const { return setup_priority::DATA; }
-
 void AS5600Sensor::dump_config() {
   LOG_SENSOR("", "AS5600 Sensor", this);
   ESP_LOGCONFIG(TAG, " Out of Range Mode: %u", this->out_of_range_mode_);
@@ -14,7 +14,6 @@ class AS5600Sensor : public PollingComponent, public Parented<AS5600Component>,
  public:
   void update() override;
   void dump_config() override;
-  float get_setup_priority() const override;

   void set_angle_sensor(sensor::Sensor *angle_sensor) { this->angle_sensor_ = angle_sensor; }
   void set_raw_angle_sensor(sensor::Sensor *raw_angle_sensor) { this->raw_angle_sensor_ = raw_angle_sensor; }
@@ -58,8 +58,6 @@ void AS7341Component::dump_config() {
   LOG_SENSOR(" ", "NIR", this->nir_);
 }

-float AS7341Component::get_setup_priority() const { return setup_priority::DATA; }
-
 void AS7341Component::update() {
   this->read_channels(this->channel_readings_);

@@ -78,7 +78,6 @@ class AS7341Component : public PollingComponent, public i2c::I2CDevice {
  public:
   void setup() override;
   void dump_config() override;
-  float get_setup_priority() const override;
   void update() override;

   void set_f1_sensor(sensor::Sensor *f1_sensor) { this->f1_ = f1_sensor; }
@@ -38,8 +38,10 @@ async def to_code(config):
         # https://github.com/ESP32Async/ESPAsyncTCP
         cg.add_library("ESP32Async/ESPAsyncTCP", "2.0.0")
     elif CORE.is_rp2040:
-        # https://github.com/khoih-prog/AsyncTCP_RP2040W
-        cg.add_library("khoih-prog/AsyncTCP_RP2040W", "1.2.0")
+        # https://github.com/ayushsharma82/RPAsyncTCP
+        # RPAsyncTCP is a drop-in replacement for AsyncTCP_RP2040W with better
+        # ESPAsyncWebServer compatibility
+        cg.add_library("ayushsharma82/RPAsyncTCP", "1.3.2")
     # Other platforms (host, etc) use socket-based implementation


@@ -8,8 +8,8 @@
 // Use ESPAsyncTCP library for ESP8266 (always Arduino)
 #include <ESPAsyncTCP.h>
 #elif defined(USE_RP2040)
-// Use AsyncTCP_RP2040W library for RP2040
-#include <AsyncTCP_RP2040W.h>
+// Use RPAsyncTCP library for RP2040
+#include <RPAsyncTCP.h>
 #else
 // Use socket-based implementation for other platforms
 #include "async_tcp_socket.h"
@@ -146,7 +146,6 @@ void ATM90E26Component::dump_config() {
   LOG_SENSOR(" ", "Active Reverse Energy A", this->reverse_active_energy_sensor_);
   LOG_SENSOR(" ", "Frequency", this->freq_sensor_);
 }
-float ATM90E26Component::get_setup_priority() const { return setup_priority::DATA; }

 uint16_t ATM90E26Component::read16_(uint8_t a_register) {
   uint8_t data[2];
@@ -13,7 +13,6 @@ class ATM90E26Component : public PollingComponent,
  public:
   void setup() override;
   void dump_config() override;
-  float get_setup_priority() const override;
   void update() override;

   void set_voltage_sensor(sensor::Sensor *obj) { this->voltage_sensor_ = obj; }
@@ -108,10 +108,14 @@ void ATM90E32Component::update() {
|
|||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void ATM90E32Component::get_cs_summary_(std::span<char, GPIO_SUMMARY_MAX_LEN> buffer) {
|
||||||
|
this->cs_->dump_summary(buffer.data(), buffer.size());
|
||||||
|
}
|
||||||
|
|
||||||
void ATM90E32Component::setup() {
|
void ATM90E32Component::setup() {
|
||||||
this->spi_setup();
|
this->spi_setup();
|
||||||
this->cs_summary_ = this->cs_->dump_summary();
|
char cs[GPIO_SUMMARY_MAX_LEN];
|
||||||
const char *cs = this->cs_summary_.c_str();
|
this->get_cs_summary_(cs);
|
||||||
|
|
||||||
uint16_t mmode0 = 0x87; // 3P4W 50Hz
|
uint16_t mmode0 = 0x87; // 3P4W 50Hz
|
||||||
uint16_t high_thresh = 0;
|
uint16_t high_thresh = 0;
|
||||||
@@ -158,12 +162,14 @@ void ATM90E32Component::setup() {
|
|||||||
|
|
||||||
if (this->enable_offset_calibration_) {
|
if (this->enable_offset_calibration_) {
|
||||||
// Initialize flash storage for offset calibrations
|
// Initialize flash storage for offset calibrations
|
||||||
uint32_t o_hash = fnv1_hash(std::string("_offset_calibration_") + this->cs_summary_);
|
uint32_t o_hash = fnv1_hash("_offset_calibration_");
|
||||||
|
o_hash = fnv1_hash_extend(o_hash, cs);
|
||||||
this->offset_pref_ = global_preferences->make_preference<OffsetCalibration[3]>(o_hash, true);
|
this->offset_pref_ = global_preferences->make_preference<OffsetCalibration[3]>(o_hash, true);
|
||||||
this->restore_offset_calibrations_();
|
this->restore_offset_calibrations_();
|
||||||
|
|
||||||
// Initialize flash storage for power offset calibrations
|
// Initialize flash storage for power offset calibrations
|
||||||
uint32_t po_hash = fnv1_hash(std::string("_power_offset_calibration_") + this->cs_summary_);
|
uint32_t po_hash = fnv1_hash("_power_offset_calibration_");
|
||||||
|
po_hash = fnv1_hash_extend(po_hash, cs);
|
||||||
this->power_offset_pref_ = global_preferences->make_preference<PowerOffsetCalibration[3]>(po_hash, true);
|
this->power_offset_pref_ = global_preferences->make_preference<PowerOffsetCalibration[3]>(po_hash, true);
|
||||||
this->restore_power_offset_calibrations_();
|
this->restore_power_offset_calibrations_();
|
||||||
} else {
|
} else {
|
||||||
@@ -183,7 +189,8 @@ void ATM90E32Component::setup() {
|
|||||||
|
|
||||||
if (this->enable_gain_calibration_) {
|
if (this->enable_gain_calibration_) {
|
||||||
// Initialize flash storage for gain calibration
|
// Initialize flash storage for gain calibration
|
||||||
uint32_t g_hash = fnv1_hash(std::string("_gain_calibration_") + this->cs_summary_);
|
uint32_t g_hash = fnv1_hash("_gain_calibration_");
|
||||||
|
g_hash = fnv1_hash_extend(g_hash, cs);
|
||||||
this->gain_calibration_pref_ = global_preferences->make_preference<GainCalibration[3]>(g_hash, true);
|
this->gain_calibration_pref_ = global_preferences->make_preference<GainCalibration[3]>(g_hash, true);
|
||||||
this->restore_gain_calibrations_();
|
this->restore_gain_calibrations_();
|
||||||
|
|
||||||
@@ -214,7 +221,8 @@ void ATM90E32Component::setup() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
void ATM90E32Component::log_calibration_status_() {
|
void ATM90E32Component::log_calibration_status_() {
|
||||||
const char *cs = this->cs_summary_.c_str();
|
char cs[GPIO_SUMMARY_MAX_LEN];
|
||||||
|
this->get_cs_summary_(cs);
|
||||||
|
|
||||||
bool offset_mismatch = false;
|
bool offset_mismatch = false;
|
||||||
bool power_mismatch = false;
|
bool power_mismatch = false;
|
||||||
@@ -565,7 +573,8 @@ float ATM90E32Component::get_chip_temperature_() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
void ATM90E32Component::run_gain_calibrations() {
|
void ATM90E32Component::run_gain_calibrations() {
|
||||||
const char *cs = this->cs_summary_.c_str();
|
char cs[GPIO_SUMMARY_MAX_LEN];
|
||||||
|
this->get_cs_summary_(cs);
|
||||||
if (!this->enable_gain_calibration_) {
|
if (!this->enable_gain_calibration_) {
|
||||||
ESP_LOGW(TAG, "[CALIBRATION][%s] Gain calibration is disabled! Enable it first with enable_gain_calibration: true",
|
ESP_LOGW(TAG, "[CALIBRATION][%s] Gain calibration is disabled! Enable it first with enable_gain_calibration: true",
|
||||||
cs);
|
cs);
|
||||||
@@ -665,7 +674,8 @@ void ATM90E32Component::run_gain_calibrations() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
void ATM90E32Component::save_gain_calibration_to_memory_() {
|
void ATM90E32Component::save_gain_calibration_to_memory_() {
|
||||||
const char *cs = this->cs_summary_.c_str();
|
char cs[GPIO_SUMMARY_MAX_LEN];
|
||||||
|
this->get_cs_summary_(cs);
|
||||||
bool success = this->gain_calibration_pref_.save(&this->gain_phase_);
|
bool success = this->gain_calibration_pref_.save(&this->gain_phase_);
|
||||||
global_preferences->sync();
|
global_preferences->sync();
|
||||||
if (success) {
|
if (success) {
|
||||||
@@ -678,7 +688,8 @@ void ATM90E32Component::save_gain_calibration_to_memory_() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
void ATM90E32Component::save_offset_calibration_to_memory_() {
|
void ATM90E32Component::save_offset_calibration_to_memory_() {
|
||||||
const char *cs = this->cs_summary_.c_str();
|
char cs[GPIO_SUMMARY_MAX_LEN];
|
||||||
|
this->get_cs_summary_(cs);
|
||||||
bool success = this->offset_pref_.save(&this->offset_phase_);
|
bool success = this->offset_pref_.save(&this->offset_phase_);
|
||||||
global_preferences->sync();
|
global_preferences->sync();
|
||||||
if (success) {
|
if (success) {
|
||||||
@@ -694,7 +705,8 @@ void ATM90E32Component::save_offset_calibration_to_memory_() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
void ATM90E32Component::save_power_offset_calibration_to_memory_() {
|
void ATM90E32Component::save_power_offset_calibration_to_memory_() {
|
||||||
const char *cs = this->cs_summary_.c_str();
|
char cs[GPIO_SUMMARY_MAX_LEN];
|
||||||
|
this->get_cs_summary_(cs);
|
||||||
bool success = this->power_offset_pref_.save(&this->power_offset_phase_);
|
bool success = this->power_offset_pref_.save(&this->power_offset_phase_);
|
||||||
global_preferences->sync();
|
global_preferences->sync();
|
||||||
if (success) {
|
if (success) {
|
||||||
@@ -710,7 +722,8 @@ void ATM90E32Component::save_power_offset_calibration_to_memory_() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
void ATM90E32Component::run_offset_calibrations() {
|
void ATM90E32Component::run_offset_calibrations() {
|
||||||
const char *cs = this->cs_summary_.c_str();
|
char cs[GPIO_SUMMARY_MAX_LEN];
|
||||||
|
this->get_cs_summary_(cs);
|
||||||
if (!this->enable_offset_calibration_) {
|
if (!this->enable_offset_calibration_) {
|
||||||
ESP_LOGW(TAG,
|
ESP_LOGW(TAG,
|
||||||
"[CALIBRATION][%s] Offset calibration is disabled! Enable it first with enable_offset_calibration: true",
|
"[CALIBRATION][%s] Offset calibration is disabled! Enable it first with enable_offset_calibration: true",
|
||||||
@@ -740,7 +753,8 @@ void ATM90E32Component::run_offset_calibrations() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
void ATM90E32Component::run_power_offset_calibrations() {
|
void ATM90E32Component::run_power_offset_calibrations() {
|
||||||
const char *cs = this->cs_summary_.c_str();
|
+  char cs[GPIO_SUMMARY_MAX_LEN];
+  this->get_cs_summary_(cs);
   if (!this->enable_offset_calibration_) {
     ESP_LOGW(
         TAG,
@@ -813,7 +827,8 @@ void ATM90E32Component::write_power_offsets_to_registers_(uint8_t phase, int16_t
 }

 void ATM90E32Component::restore_gain_calibrations_() {
-  const char *cs = this->cs_summary_.c_str();
+  char cs[GPIO_SUMMARY_MAX_LEN];
+  this->get_cs_summary_(cs);
   for (uint8_t i = 0; i < 3; ++i) {
     this->config_gain_phase_[i].voltage_gain = this->phase_[i].voltage_gain_;
     this->config_gain_phase_[i].current_gain = this->phase_[i].ct_gain_;
@@ -867,7 +882,8 @@ void ATM90E32Component::restore_gain_calibrations_() {
 }

 void ATM90E32Component::restore_offset_calibrations_() {
-  const char *cs = this->cs_summary_.c_str();
+  char cs[GPIO_SUMMARY_MAX_LEN];
+  this->get_cs_summary_(cs);
   for (uint8_t i = 0; i < 3; ++i)
     this->config_offset_phase_[i] = this->offset_phase_[i];

@@ -909,7 +925,8 @@ void ATM90E32Component::restore_offset_calibrations_() {
 }

 void ATM90E32Component::restore_power_offset_calibrations_() {
-  const char *cs = this->cs_summary_.c_str();
+  char cs[GPIO_SUMMARY_MAX_LEN];
+  this->get_cs_summary_(cs);
   for (uint8_t i = 0; i < 3; ++i)
     this->config_power_offset_phase_[i] = this->power_offset_phase_[i];

@@ -951,7 +968,8 @@ void ATM90E32Component::restore_power_offset_calibrations_() {
 }

 void ATM90E32Component::clear_gain_calibrations() {
-  const char *cs = this->cs_summary_.c_str();
+  char cs[GPIO_SUMMARY_MAX_LEN];
+  this->get_cs_summary_(cs);
   if (!this->using_saved_calibrations_) {
     ESP_LOGI(TAG, "[CALIBRATION][%s] No stored gain calibrations to clear. Current values:", cs);
     ESP_LOGI(TAG, "[CALIBRATION][%s] ----------------------------------------------------------", cs);
@@ -1000,7 +1018,8 @@ void ATM90E32Component::clear_gain_calibrations() {
 }

 void ATM90E32Component::clear_offset_calibrations() {
-  const char *cs = this->cs_summary_.c_str();
+  char cs[GPIO_SUMMARY_MAX_LEN];
+  this->get_cs_summary_(cs);
   if (!this->restored_offset_calibration_) {
     ESP_LOGI(TAG, "[CALIBRATION][%s] No stored offset calibrations to clear. Current values:", cs);
     ESP_LOGI(TAG, "[CALIBRATION][%s] --------------------------------------------------------------", cs);
@@ -1042,7 +1061,8 @@ void ATM90E32Component::clear_offset_calibrations() {
 }

 void ATM90E32Component::clear_power_offset_calibrations() {
-  const char *cs = this->cs_summary_.c_str();
+  char cs[GPIO_SUMMARY_MAX_LEN];
+  this->get_cs_summary_(cs);
   if (!this->restored_power_offset_calibration_) {
     ESP_LOGI(TAG, "[CALIBRATION][%s] No stored power offsets to clear. Current values:", cs);
     ESP_LOGI(TAG, "[CALIBRATION][%s] ---------------------------------------------------------------------", cs);
@@ -1117,7 +1137,8 @@ int16_t ATM90E32Component::calibrate_power_offset(uint8_t phase, bool reactive)
 }

 bool ATM90E32Component::verify_gain_writes_() {
-  const char *cs = this->cs_summary_.c_str();
+  char cs[GPIO_SUMMARY_MAX_LEN];
+  this->get_cs_summary_(cs);
   bool success = true;
   for (uint8_t phase = 0; phase < 3; phase++) {
     uint16_t read_voltage = this->read16_(voltage_gain_registers[phase]);
@@ -1,11 +1,13 @@
 #pragma once

+#include <span>
 #include <unordered_map>
 #include "atm90e32_reg.h"
 #include "esphome/components/sensor/sensor.h"
 #include "esphome/components/spi/spi.h"
 #include "esphome/core/application.h"
 #include "esphome/core/component.h"
+#include "esphome/core/gpio.h"
 #include "esphome/core/helpers.h"
 #include "esphome/core/preferences.h"

@@ -182,6 +184,7 @@ class ATM90E32Component : public PollingComponent,
   bool verify_gain_writes_();
   bool validate_spi_read_(uint16_t expected, const char *context = nullptr);
   void log_calibration_status_();
+  void get_cs_summary_(std::span<char, GPIO_SUMMARY_MAX_LEN> buffer);

   struct ATM90E32Phase {
     uint16_t voltage_gain_{0};
@@ -247,7 +250,6 @@ class ATM90E32Component : public PollingComponent,
   ESPPreferenceObject offset_pref_;
   ESPPreferenceObject power_offset_pref_;
   ESPPreferenceObject gain_calibration_pref_;
-  std::string cs_summary_;

   sensor::Sensor *freq_sensor_{nullptr};
 #ifdef USE_TEXT_SENSOR
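Note: the hunks above trade the heap-allocated std::string cs_summary_ member for a fixed-size stack buffer that callers pass to get_cs_summary_(std::span<char, GPIO_SUMMARY_MAX_LEN>). Only the declaration appears in this diff; the sketch below is an assumed implementation, and both the buffer size and the format string are placeholders rather than values taken from the source.

// Sketch only: the declaration comes from the diff, the body is an assumption.
#include <cstddef>
#include <cstdio>
#include <span>

constexpr std::size_t GPIO_SUMMARY_MAX_LEN = 16;  // assumed value, not from the diff

void get_cs_summary(std::span<char, GPIO_SUMMARY_MAX_LEN> buffer, int cs_pin) {
  // snprintf cannot overflow the fixed-extent span and always null-terminates it
  std::snprintf(buffer.data(), buffer.size(), "CS=%d", cs_pin);
}

// Matches the call pattern seen in the .cpp hunks: a plain char array of the
// same length converts implicitly to the fixed-extent std::span parameter.
// char cs[GPIO_SUMMARY_MAX_LEN];
// get_cs_summary(cs, 5);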
@@ -1,4 +1,5 @@
 import esphome.codegen as cg
+from esphome.components.esp32 import add_idf_component, include_builtin_idf_component
 import esphome.config_validation as cv
 from esphome.const import CONF_BITS_PER_SAMPLE, CONF_NUM_CHANNELS, CONF_SAMPLE_RATE
 import esphome.final_validate as fv
@@ -165,4 +166,10 @@ def final_validate_audio_schema(


 async def to_code(config):
-    cg.add_library("esphome/esp-audio-libs", "2.0.1")
+    # Re-enable ESP-IDF's HTTP client (excluded by default to save compile time)
+    include_builtin_idf_component("esp_http_client")
+
+    add_idf_component(
+        name="esphome/esp-audio-libs",
+        ref="2.0.3",
+    )
@@ -300,7 +300,7 @@ FileDecoderState AudioDecoder::decode_mp3_() {

     // Advance read pointer to match the offset for the syncword
     this->input_transfer_buffer_->decrease_buffer_length(offset);
-    uint8_t *buffer_start = this->input_transfer_buffer_->get_buffer_start();
+    const uint8_t *buffer_start = this->input_transfer_buffer_->get_buffer_start();

     buffer_length = (int) this->input_transfer_buffer_->available();
     int err = esp_audio_libs::helix_decoder::MP3Decode(this->mp3_decoder_, &buffer_start, &buffer_length,
@@ -185,18 +185,16 @@ esp_err_t AudioReader::start(const std::string &uri, AudioFileType &file_type) {
     return err;
   }

-  std::string url_string = str_lower_case(url);
-
-  if (str_endswith(url_string, ".wav")) {
+  if (str_endswith_ignore_case(url, ".wav")) {
     file_type = AudioFileType::WAV;
   }
 #ifdef USE_AUDIO_MP3_SUPPORT
-  else if (str_endswith(url_string, ".mp3")) {
+  else if (str_endswith_ignore_case(url, ".mp3")) {
     file_type = AudioFileType::MP3;
   }
 #endif
 #ifdef USE_AUDIO_FLAC_SUPPORT
-  else if (str_endswith(url_string, ".flac")) {
+  else if (str_endswith_ignore_case(url, ".flac")) {
     file_type = AudioFileType::FLAC;
   }
 #endif
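The hunk above drops the lower-cased copy of the URL and calls a case-insensitive suffix helper instead, so the extension check no longer allocates a second string. The helper's implementation is not part of this diff; the function below is an illustrative stand-in, not the actual esphome str_endswith_ignore_case.

#include <cctype>
#include <cstddef>
#include <string>

// Illustrative case-insensitive suffix check (stand-in, not the esphome helper).
bool ends_with_ignore_case(const std::string &str, const std::string &suffix) {
  if (suffix.size() > str.size())
    return false;
  const std::size_t offset = str.size() - suffix.size();
  for (std::size_t i = 0; i < suffix.size(); i++) {
    const unsigned char a = static_cast<unsigned char>(str[offset + i]);
    const unsigned char b = static_cast<unsigned char>(suffix[i]);
    if (std::tolower(a) != std::tolower(b))
      return false;
  }
  return true;
}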
@@ -6,8 +6,7 @@ namespace bang_bang {

 static const char *const TAG = "bang_bang.climate";

-BangBangClimate::BangBangClimate()
-    : idle_trigger_(new Trigger<>()), cool_trigger_(new Trigger<>()), heat_trigger_(new Trigger<>()) {}
+BangBangClimate::BangBangClimate() = default;

 void BangBangClimate::setup() {
   this->sensor_->add_on_state_callback([this](float state) {
@@ -160,13 +159,13 @@ void BangBangClimate::switch_to_action_(climate::ClimateAction action) {
   switch (action) {
     case climate::CLIMATE_ACTION_OFF:
     case climate::CLIMATE_ACTION_IDLE:
-      trig = this->idle_trigger_;
+      trig = &this->idle_trigger_;
       break;
     case climate::CLIMATE_ACTION_COOLING:
-      trig = this->cool_trigger_;
+      trig = &this->cool_trigger_;
       break;
     case climate::CLIMATE_ACTION_HEATING:
-      trig = this->heat_trigger_;
+      trig = &this->heat_trigger_;
       break;
     default:
       trig = nullptr;
@@ -204,9 +203,9 @@ void BangBangClimate::set_away_config(const BangBangClimateTargetTempConfig &awa
 void BangBangClimate::set_sensor(sensor::Sensor *sensor) { this->sensor_ = sensor; }
 void BangBangClimate::set_humidity_sensor(sensor::Sensor *humidity_sensor) { this->humidity_sensor_ = humidity_sensor; }

-Trigger<> *BangBangClimate::get_idle_trigger() const { return this->idle_trigger_; }
-Trigger<> *BangBangClimate::get_cool_trigger() const { return this->cool_trigger_; }
-Trigger<> *BangBangClimate::get_heat_trigger() const { return this->heat_trigger_; }
+Trigger<> *BangBangClimate::get_idle_trigger() { return &this->idle_trigger_; }
+Trigger<> *BangBangClimate::get_cool_trigger() { return &this->cool_trigger_; }
+Trigger<> *BangBangClimate::get_heat_trigger() { return &this->heat_trigger_; }

 void BangBangClimate::set_supports_cool(bool supports_cool) { this->supports_cool_ = supports_cool; }
 void BangBangClimate::set_supports_heat(bool supports_heat) { this->supports_heat_ = supports_heat; }
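These hunks change the trigger members from heap-allocated pointers to plain value members: the defaulted constructor replaces three new Trigger<>() calls, and the getters hand out non-owning pointers to the in-object instances. A minimal, hypothetical illustration of the pattern follows (placeholder names, not the esphome classes).

struct Trigger {
  void trigger() {}
};

class Controller {
 public:
  Controller() = default;  // members are default-constructed in place, no heap allocation
  Trigger *get_idle_trigger() { return &this->idle_trigger_; }
  Trigger *get_cool_trigger() { return &this->cool_trigger_; }
  Trigger *get_heat_trigger() { return &this->heat_trigger_; }

 protected:
  Trigger idle_trigger_;
  Trigger cool_trigger_;
  Trigger heat_trigger_;
};

The getters can no longer be const because they return a mutable pointer into the object itself, which is why the const qualifiers disappear in the hunks above.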
@@ -30,9 +30,9 @@ class BangBangClimate : public climate::Climate, public Component {
   void set_normal_config(const BangBangClimateTargetTempConfig &normal_config);
   void set_away_config(const BangBangClimateTargetTempConfig &away_config);

-  Trigger<> *get_idle_trigger() const;
-  Trigger<> *get_cool_trigger() const;
-  Trigger<> *get_heat_trigger() const;
+  Trigger<> *get_idle_trigger();
+  Trigger<> *get_cool_trigger();
+  Trigger<> *get_heat_trigger();

 protected:
   /// Override control to change settings of the climate device.
@@ -57,17 +57,13 @@ class BangBangClimate : public climate::Climate, public Component {
   *
   * In idle mode, the controller is assumed to have both heating and cooling disabled.
   */
-  Trigger<> *idle_trigger_{nullptr};
+  Trigger<> idle_trigger_;
   /** The trigger to call when the controller should switch to cooling mode.
   */
-  Trigger<> *cool_trigger_{nullptr};
+  Trigger<> cool_trigger_;
   /** The trigger to call when the controller should switch to heating mode.
-   *
-   * A null value for this attribute means that the controller has no heating action
-   * For example window blinds, where only cooling (blinds closed) and not-cooling
-   * (blinds open) is possible.
   */
-  Trigger<> *heat_trigger_{nullptr};
+  Trigger<> heat_trigger_;
   /** A reference to the trigger that was previously active.
   *
   * This is so that the previous trigger can be stopped before enabling a new one.
@@ -265,6 +265,4 @@ void BH1750Sensor::fail_and_reset_() {
   this->state_ = IDLE;
 }

-float BH1750Sensor::get_setup_priority() const { return setup_priority::DATA; }
-
 }  // namespace esphome::bh1750
@@ -21,7 +21,6 @@ class BH1750Sensor : public sensor::Sensor, public PollingComponent, public i2c:
   void dump_config() override;
   void update() override;
   void loop() override;
-  float get_setup_priority() const override;

 protected:
   // State machine states
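This pair of hunks, and several of the BME280, BME680, BSEC, BMI160 and BMP085 hunks below, simply delete get_setup_priority() overrides that returned setup_priority::DATA. Removing the override leaves whatever value the base class supplies; assuming the base default already matches, behaviour is unchanged, though that assumption is not visible in this diff. A hypothetical illustration with placeholder class names and a placeholder priority value:

struct Component {
  virtual float get_setup_priority() const { return 600.0f; }  // assumed base default
  virtual ~Component() = default;
};

struct SensorWithOverride : Component {
  float get_setup_priority() const override { return 600.0f; }  // redundant override
};

struct SensorWithoutOverride : Component {};  // inherits the same priority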
@@ -14,10 +14,7 @@ void log_binary_sensor(const char *tag, const char *prefix, const char *type, Bi
   }

   ESP_LOGCONFIG(tag, "%s%s '%s'", prefix, type, obj->get_name().c_str());
-  if (!obj->get_device_class_ref().empty()) {
-    ESP_LOGCONFIG(tag, "%s Device Class: '%s'", prefix, obj->get_device_class_ref().c_str());
-  }
+  LOG_ENTITY_DEVICE_CLASS(tag, prefix, *obj);
 }

 void BinarySensor::publish_state(bool new_state) {
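The hunk above folds the device-class logging into a LOG_ENTITY_DEVICE_CLASS macro. Its definition is not part of this diff; an assumed expansion, reconstructed from the three removed lines, might look like this (the real esphome definition may differ):

// Assumed macro body, mirroring the removed lines above; not the actual esphome definition.
#define LOG_ENTITY_DEVICE_CLASS(tag, prefix, obj) \
  if (!(obj).get_device_class_ref().empty()) { \
    ESP_LOGCONFIG(tag, "%s Device Class: '%s'", prefix, (obj).get_device_class_ref().c_str()); \
  }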
@@ -9,7 +9,7 @@ static const char *const TAG = "bl0940.number";
 void CalibrationNumber::setup() {
   float value = 0.0f;
   if (this->restore_value_) {
-    this->pref_ = global_preferences->make_preference<float>(this->get_preference_hash());
+    this->pref_ = this->make_entity_preference<float>();
     if (!this->pref_.load(&value)) {
       value = 0.0f;
     }
@@ -135,8 +135,8 @@ void BluetoothConnection::loop() {
   //  - For V3_WITH_CACHE: Services are never sent, disable after INIT state
   //  - For V3_WITHOUT_CACHE: Disable only after service discovery is complete
   //    (send_service_ == DONE_SENDING_SERVICES, which is only set after services are sent)
-  if (this->state_ != espbt::ClientState::INIT && (this->connection_type_ == espbt::ConnectionType::V3_WITH_CACHE ||
+  if (this->state() != espbt::ClientState::INIT && (this->connection_type_ == espbt::ConnectionType::V3_WITH_CACHE ||
                                                     this->send_service_ == DONE_SENDING_SERVICES)) {
     this->disable_loop();
   }
 }
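The only change in this hunk is reading the connection state through the state() accessor instead of touching the state_ field directly, which lets the base class keep the member private. A hypothetical sketch of that pattern (placeholder names, not the esp32_ble_tracker API):

enum class ClientState { INIT, CONNECTED };

class ClientBase {
 public:
  ClientState state() const { return this->state_; }  // read-only accessor

 private:
  ClientState state_{ClientState::INIT};
};

class Connection : public ClientBase {
 public:
  // Derived classes can no longer reach state_ directly; they go through state().
  bool ready() const { return this->state() != ClientState::INIT; }
};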
@@ -199,7 +199,6 @@ void BME280Component::dump_config() {
   LOG_SENSOR("  ", "Humidity", this->humidity_sensor_);
   ESP_LOGCONFIG(TAG, "    Oversampling: %s", oversampling_to_str(this->humidity_oversampling_));
 }
-float BME280Component::get_setup_priority() const { return setup_priority::DATA; }

 inline uint8_t oversampling_to_time(BME280Oversampling over_sampling) { return (1 << uint8_t(over_sampling)) >> 1; }

@@ -76,7 +76,6 @@ class BME280Component : public PollingComponent {
   // (In most use cases you won't need these)
   void setup() override;
   void dump_config() override;
-  float get_setup_priority() const override;
   void update() override;

 protected:
@@ -233,8 +233,6 @@ void BME680Component::dump_config() {
   }
 }

-float BME680Component::get_setup_priority() const { return setup_priority::DATA; }
-
 void BME680Component::update() {
   uint8_t meas_control = 0;  // No need to fetch, we're setting all fields
   meas_control |= (this->temperature_oversampling_ & 0b111) << 5;
@@ -99,7 +99,6 @@ class BME680Component : public PollingComponent, public i2c::I2CDevice {
   // (In most use cases you won't need these)
   void setup() override;
   void dump_config() override;
-  float get_setup_priority() const override;
   void update() override;

 protected:
@@ -89,8 +89,9 @@ async def to_code(config):
         var.set_state_save_interval(config[CONF_STATE_SAVE_INTERVAL].total_milliseconds)
     )

-    # Although this component does not use SPI, the BSEC library requires the SPI library
+    # Although this component does not use SPI/Wire directly, the BSEC library requires them
     cg.add_library("SPI", None)
+    cg.add_library("Wire", None)

     cg.add_define("USE_BSEC")
     cg.add_library("boschsensortec/BSEC Software Library", "1.6.1480")
@@ -181,8 +181,6 @@ void BME680BSECComponent::dump_config() {
   LOG_SENSOR("  ", "Breath VOC Equivalent", this->breath_voc_equivalent_sensor_);
 }

-float BME680BSECComponent::get_setup_priority() const { return setup_priority::DATA; }
-
 void BME680BSECComponent::loop() {
   this->run_();

@@ -64,7 +64,6 @@ class BME680BSECComponent : public Component, public i2c::I2CDevice {

   void setup() override;
   void dump_config() override;
-  float get_setup_priority() const override;
   void loop() override;

 protected:
@@ -106,8 +106,6 @@ void BME68xBSEC2Component::dump_config() {
 #endif
 }

-float BME68xBSEC2Component::get_setup_priority() const { return setup_priority::DATA; }
-
 void BME68xBSEC2Component::loop() {
   this->run_();

@@ -48,7 +48,6 @@ class BME68xBSEC2Component : public Component {
 public:
   void setup() override;
   void dump_config() override;
-  float get_setup_priority() const override;
   void loop() override;

   void set_algorithm_output(AlgorithmOutput algorithm_output) { this->algorithm_output_ = algorithm_output; }
@@ -263,7 +263,6 @@ void BMI160Component::update() {

   this->status_clear_warning();
 }
-float BMI160Component::get_setup_priority() const { return setup_priority::DATA; }

 }  // namespace bmi160
 }  // namespace esphome
@@ -14,8 +14,6 @@ class BMI160Component : public PollingComponent, public i2c::I2CDevice {

   void update() override;

-  float get_setup_priority() const override;
-
   void set_accel_x_sensor(sensor::Sensor *accel_x_sensor) { accel_x_sensor_ = accel_x_sensor; }
   void set_accel_y_sensor(sensor::Sensor *accel_y_sensor) { accel_y_sensor_ = accel_y_sensor; }
   void set_accel_z_sensor(sensor::Sensor *accel_z_sensor) { accel_z_sensor_ = accel_z_sensor; }
@@ -131,7 +131,6 @@ bool BMP085Component::set_mode_(uint8_t mode) {
   ESP_LOGV(TAG, "Setting mode to 0x%02X", mode);
   return this->write_byte(BMP085_REGISTER_CONTROL, mode);
 }
-float BMP085Component::get_setup_priority() const { return setup_priority::DATA; }

 }  // namespace bmp085
 }  // namespace esphome
Some files were not shown because too many files have changed in this diff.